gwan-pdf-engine 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,13 @@
1
+ import { S as StorageAdapter } from '../../index-Dasg2QQ6.mjs';
2
+
3
+ /**
4
+ * GCP Cloud Storage adapter.
5
+ * Requires @google-cloud/storage to be installed (optional peer dependency).
6
+ *
7
+ * Usage:
8
+ * import { createGcpAdapter } from '@gwan/pdf-engine/adapters/gcp'
9
+ * const storage = createGcpAdapter('my-bucket')
10
+ */
11
+ declare function createGcpAdapter(bucketName: string): StorageAdapter;
12
+
13
+ export { createGcpAdapter };
@@ -0,0 +1,13 @@
1
+ import { S as StorageAdapter } from '../../index-Dasg2QQ6.js';
2
+
3
+ /**
4
+ * GCP Cloud Storage adapter.
5
+ * Requires @google-cloud/storage to be installed (optional peer dependency).
6
+ *
7
+ * Usage:
8
+ * import { createGcpAdapter } from '@gwan/pdf-engine/adapters/gcp'
9
+ * const storage = createGcpAdapter('my-bucket')
10
+ */
11
+ declare function createGcpAdapter(bucketName: string): StorageAdapter;
12
+
13
+ export { createGcpAdapter };
@@ -0,0 +1,60 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
+ // src/adapters/storage/gcp.ts
21
+ var gcp_exports = {};
22
+ __export(gcp_exports, {
23
+ createGcpAdapter: () => createGcpAdapter
24
+ });
25
+ module.exports = __toCommonJS(gcp_exports);
26
+ function createGcpAdapter(bucketName) {
27
+ let _storage;
28
+ let _bucket;
29
+ function getBucket() {
30
+ if (!_bucket) {
31
+ try {
32
+ const { Storage } = require("@google-cloud/storage");
33
+ _storage = new Storage();
34
+ _bucket = _storage.bucket(bucketName);
35
+ } catch {
36
+ throw new Error(
37
+ "@google-cloud/storage is not installed. Run: npm install @google-cloud/storage"
38
+ );
39
+ }
40
+ }
41
+ return _bucket;
42
+ }
43
+ return {
44
+ async download(remotePath) {
45
+ const bucket = getBucket();
46
+ const [buffer] = await bucket.file(remotePath).download();
47
+ return buffer;
48
+ },
49
+ async upload(buffer, remotePath) {
50
+ const bucket = getBucket();
51
+ const file = bucket.file(remotePath);
52
+ await file.save(buffer, { resumable: false });
53
+ return `https://storage.googleapis.com/${bucketName}/${remotePath}`;
54
+ }
55
+ };
56
+ }
57
+ // Annotate the CommonJS export names for ESM import in node:
58
+ 0 && (module.exports = {
59
+ createGcpAdapter
60
+ });
@@ -0,0 +1,39 @@
1
+ import {
2
+ __require
3
+ } from "../../chunk-Y6FXYEAI.mjs";
4
+
5
+ // src/adapters/storage/gcp.ts
6
+ function createGcpAdapter(bucketName) {
7
+ let _storage;
8
+ let _bucket;
9
+ function getBucket() {
10
+ if (!_bucket) {
11
+ try {
12
+ const { Storage } = __require("@google-cloud/storage");
13
+ _storage = new Storage();
14
+ _bucket = _storage.bucket(bucketName);
15
+ } catch {
16
+ throw new Error(
17
+ "@google-cloud/storage is not installed. Run: npm install @google-cloud/storage"
18
+ );
19
+ }
20
+ }
21
+ return _bucket;
22
+ }
23
+ return {
24
+ async download(remotePath) {
25
+ const bucket = getBucket();
26
+ const [buffer] = await bucket.file(remotePath).download();
27
+ return buffer;
28
+ },
29
+ async upload(buffer, remotePath) {
30
+ const bucket = getBucket();
31
+ const file = bucket.file(remotePath);
32
+ await file.save(buffer, { resumable: false });
33
+ return `https://storage.googleapis.com/${bucketName}/${remotePath}`;
34
+ }
35
+ };
36
+ }
37
+ export {
38
+ createGcpAdapter
39
+ };
@@ -0,0 +1,9 @@
1
+ import { S as StorageAdapter } from '../../index-Dasg2QQ6.mjs';
2
+
3
+ /**
4
+ * Local filesystem storage adapter — for development and testing.
5
+ * All paths are resolved relative to basePath.
6
+ */
7
+ declare function createLocalAdapter(basePath: string): StorageAdapter;
8
+
9
+ export { createLocalAdapter };
@@ -0,0 +1,9 @@
1
+ import { S as StorageAdapter } from '../../index-Dasg2QQ6.js';
2
+
3
+ /**
4
+ * Local filesystem storage adapter — for development and testing.
5
+ * All paths are resolved relative to basePath.
6
+ */
7
+ declare function createLocalAdapter(basePath: string): StorageAdapter;
8
+
9
+ export { createLocalAdapter };
@@ -0,0 +1,45 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
+ // src/adapters/storage/local.ts
21
+ var local_exports = {};
22
+ __export(local_exports, {
23
+ createLocalAdapter: () => createLocalAdapter
24
+ });
25
+ module.exports = __toCommonJS(local_exports);
26
+ var import_promises = require("fs/promises");
27
+ var import_path = require("path");
28
+ function createLocalAdapter(basePath) {
29
+ return {
30
+ async download(remotePath) {
31
+ const fullPath = (0, import_path.join)(basePath, remotePath);
32
+ return (0, import_promises.readFile)(fullPath);
33
+ },
34
+ async upload(buffer, remotePath) {
35
+ const fullPath = (0, import_path.join)(basePath, remotePath);
36
+ await (0, import_promises.mkdir)((0, import_path.dirname)(fullPath), { recursive: true });
37
+ await (0, import_promises.writeFile)(fullPath, buffer);
38
+ return `file://${fullPath}`;
39
+ }
40
+ };
41
+ }
42
+ // Annotate the CommonJS export names for ESM import in node:
43
+ 0 && (module.exports = {
44
+ createLocalAdapter
45
+ });
@@ -0,0 +1,22 @@
1
+ import "../../chunk-Y6FXYEAI.mjs";
2
+
3
+ // src/adapters/storage/local.ts
4
+ import { readFile, writeFile, mkdir } from "fs/promises";
5
+ import { dirname, join } from "path";
6
+ function createLocalAdapter(basePath) {
7
+ return {
8
+ async download(remotePath) {
9
+ const fullPath = join(basePath, remotePath);
10
+ return readFile(fullPath);
11
+ },
12
+ async upload(buffer, remotePath) {
13
+ const fullPath = join(basePath, remotePath);
14
+ await mkdir(dirname(fullPath), { recursive: true });
15
+ await writeFile(fullPath, buffer);
16
+ return `file://${fullPath}`;
17
+ }
18
+ };
19
+ }
20
+ export {
21
+ createLocalAdapter
22
+ };
@@ -0,0 +1,10 @@
1
+ var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
2
+ get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
3
+ }) : x)(function(x) {
4
+ if (typeof require !== "undefined") return require.apply(this, arguments);
5
+ throw Error('Dynamic require of "' + x + '" is not supported');
6
+ });
7
+
8
+ export {
9
+ __require
10
+ };
@@ -0,0 +1,41 @@
1
+ interface StorageAdapter {
2
+ download(remotePath: string): Promise<Buffer>;
3
+ upload(buffer: Buffer, remotePath: string): Promise<string>;
4
+ }
5
+ type BookSize = '6x9' | '7x9' | '5x8' | '8.5x11';
6
+ type AssetType = 'cover' | 'template' | 'back_cover';
7
+ interface AssetInput {
8
+ path: string;
9
+ type: AssetType;
10
+ repeatCount?: number;
11
+ }
12
+ interface BookConfig {
13
+ size: BookSize;
14
+ bleed: boolean;
15
+ assets: AssetInput[];
16
+ outputPath: string;
17
+ pressReady?: boolean;
18
+ }
19
+ interface PdfOutput {
20
+ buffer: Buffer;
21
+ url: string;
22
+ pageCount: number;
23
+ }
24
+ interface ValidationReport {
25
+ valid: boolean;
26
+ colorSpace: 'CMYK' | 'RGB' | 'unknown';
27
+ dpi: number;
28
+ hasBleed: boolean;
29
+ warnings: string[];
30
+ }
31
+ interface PageNumberOptions {
32
+ startPage?: number;
33
+ position?: 'bottom-center' | 'bottom-right' | 'bottom-left';
34
+ fontSize?: number;
35
+ }
36
+ interface SplitRange {
37
+ start: number;
38
+ end: number;
39
+ }
40
+
41
+ export type { AssetInput as A, BookConfig as B, PdfOutput as P, StorageAdapter as S, ValidationReport as V, AssetType as a, BookSize as b, PageNumberOptions as c, SplitRange as d };
@@ -0,0 +1,41 @@
1
+ interface StorageAdapter {
2
+ download(remotePath: string): Promise<Buffer>;
3
+ upload(buffer: Buffer, remotePath: string): Promise<string>;
4
+ }
5
+ type BookSize = '6x9' | '7x9' | '5x8' | '8.5x11';
6
+ type AssetType = 'cover' | 'template' | 'back_cover';
7
+ interface AssetInput {
8
+ path: string;
9
+ type: AssetType;
10
+ repeatCount?: number;
11
+ }
12
+ interface BookConfig {
13
+ size: BookSize;
14
+ bleed: boolean;
15
+ assets: AssetInput[];
16
+ outputPath: string;
17
+ pressReady?: boolean;
18
+ }
19
+ interface PdfOutput {
20
+ buffer: Buffer;
21
+ url: string;
22
+ pageCount: number;
23
+ }
24
+ interface ValidationReport {
25
+ valid: boolean;
26
+ colorSpace: 'CMYK' | 'RGB' | 'unknown';
27
+ dpi: number;
28
+ hasBleed: boolean;
29
+ warnings: string[];
30
+ }
31
+ interface PageNumberOptions {
32
+ startPage?: number;
33
+ position?: 'bottom-center' | 'bottom-right' | 'bottom-left';
34
+ fontSize?: number;
35
+ }
36
+ interface SplitRange {
37
+ start: number;
38
+ end: number;
39
+ }
40
+
41
+ export type { AssetInput as A, BookConfig as B, PdfOutput as P, StorageAdapter as S, ValidationReport as V, AssetType as a, BookSize as b, PageNumberOptions as c, SplitRange as d };
@@ -0,0 +1,41 @@
1
+ interface StorageAdapter {
2
+ download(remotePath: string): Promise<Buffer>;
3
+ upload(buffer: Buffer, remotePath: string): Promise<string>;
4
+ }
5
+ type BookSize = '6x9' | '5x8' | '8.5x11';
6
+ type AssetType = 'cover' | 'template' | 'back_cover';
7
+ interface AssetInput {
8
+ path: string;
9
+ type: AssetType;
10
+ repeatCount?: number;
11
+ }
12
+ interface BookConfig {
13
+ size: BookSize;
14
+ bleed: boolean;
15
+ assets: AssetInput[];
16
+ outputPath: string;
17
+ pressReady?: boolean;
18
+ }
19
+ interface PdfOutput {
20
+ buffer: Buffer;
21
+ url: string;
22
+ pageCount: number;
23
+ }
24
+ interface ValidationReport {
25
+ valid: boolean;
26
+ colorSpace: 'CMYK' | 'RGB' | 'unknown';
27
+ dpi: number;
28
+ hasBleed: boolean;
29
+ warnings: string[];
30
+ }
31
+ interface PageNumberOptions {
32
+ startPage?: number;
33
+ position?: 'bottom-center' | 'bottom-right' | 'bottom-left';
34
+ fontSize?: number;
35
+ }
36
+ interface SplitRange {
37
+ start: number;
38
+ end: number;
39
+ }
40
+
41
+ export type { AssetInput as A, BookConfig as B, PdfOutput as P, StorageAdapter as S, ValidationReport as V, AssetType as a, BookSize as b, PageNumberOptions as c, SplitRange as d };
@@ -0,0 +1,41 @@
1
+ interface StorageAdapter {
2
+ download(remotePath: string): Promise<Buffer>;
3
+ upload(buffer: Buffer, remotePath: string): Promise<string>;
4
+ }
5
+ type BookSize = '6x9' | '5x8' | '8.5x11';
6
+ type AssetType = 'cover' | 'template' | 'back_cover';
7
+ interface AssetInput {
8
+ path: string;
9
+ type: AssetType;
10
+ repeatCount?: number;
11
+ }
12
+ interface BookConfig {
13
+ size: BookSize;
14
+ bleed: boolean;
15
+ assets: AssetInput[];
16
+ outputPath: string;
17
+ pressReady?: boolean;
18
+ }
19
+ interface PdfOutput {
20
+ buffer: Buffer;
21
+ url: string;
22
+ pageCount: number;
23
+ }
24
+ interface ValidationReport {
25
+ valid: boolean;
26
+ colorSpace: 'CMYK' | 'RGB' | 'unknown';
27
+ dpi: number;
28
+ hasBleed: boolean;
29
+ warnings: string[];
30
+ }
31
+ interface PageNumberOptions {
32
+ startPage?: number;
33
+ position?: 'bottom-center' | 'bottom-right' | 'bottom-left';
34
+ fontSize?: number;
35
+ }
36
+ interface SplitRange {
37
+ start: number;
38
+ end: number;
39
+ }
40
+
41
+ export type { AssetInput as A, BookConfig as B, PdfOutput as P, StorageAdapter as S, ValidationReport as V, AssetType as a, BookSize as b, PageNumberOptions as c, SplitRange as d };
@@ -0,0 +1,54 @@
1
+ import { S as StorageAdapter, B as BookConfig, P as PdfOutput, V as ValidationReport } from './index-Dasg2QQ6.mjs';
2
+ export { A as AssetInput, a as AssetType, b as BookSize, c as PageNumberOptions, d as SplitRange } from './index-Dasg2QQ6.mjs';
3
+
4
+ declare class PdfForge {
5
+ private storage;
6
+ constructor(storage: StorageAdapter);
7
+ /**
8
+ * Assembles a press-ready PDF book from a list of CMYK image assets.
9
+ * Assets are downloaded from storage, assembled in order (respecting repeatCount),
10
+ * and the result is uploaded back to storage.
11
+ */
12
+ buildBook(config: BookConfig): Promise<PdfOutput>;
13
+ /**
14
+ * Downloads PDFs from storage, merges them into a single PDF,
15
+ * uploads the result to the last path's directory as 'merged.pdf',
16
+ * and returns the output.
17
+ */
18
+ mergePdfs(paths: string[], outputPath: string): Promise<PdfOutput>;
19
+ /**
20
+ * Downloads a PDF from storage, splits it into page ranges,
21
+ * uploads each part, and returns an array of PdfOutputs.
22
+ */
23
+ splitPdf(path: string, ranges: Array<[number, number]>, outputBasePath: string): Promise<PdfOutput[]>;
24
+ /**
25
+ * Downloads a PDF, compresses it, uploads the result, and returns the output.
26
+ */
27
+ compress(path: string, outputPath: string): Promise<PdfOutput>;
28
+ /**
29
+ * Downloads an asset (image or PDF) and validates CMYK color space, DPI, and bleed.
30
+ */
31
+ validate(path: string): Promise<ValidationReport>;
32
+ }
33
+
34
+ declare function validateAsset(buffer: Buffer): Promise<ValidationReport>;
35
+
36
+ declare function mergePdfs(buffers: Buffer[], outputPath: string, storage: StorageAdapter): Promise<PdfOutput>;
37
+
38
+ /**
39
+ * Splits a PDF into parts defined by inclusive [start, end] page ranges (0-indexed).
40
+ * Each part is uploaded to `${outputBasePath}/part-N.pdf`.
41
+ */
42
+ declare function splitPdf(buffer: Buffer, ranges: Array<[number, number]>, outputBasePath: string, storage: StorageAdapter): Promise<PdfOutput[]>;
43
+
44
+ /**
45
+ * Re-saves the PDF with object streams enabled (pdf-lib's built-in compression).
46
+ * For deeper compression, pass pressReady: true to use Ghostscript (requires gs installed).
47
+ *
48
+ * Note: Object stream compression reduces cross-reference table size but does not
49
+ * re-compress already-embedded image data. Ghostscript post-processing is needed
50
+ * for full image resampling/compression.
51
+ */
52
+ declare function compressPdf(buffer: Buffer, outputPath: string, storage: StorageAdapter): Promise<PdfOutput>;
53
+
54
+ export { BookConfig, PdfForge, PdfOutput, StorageAdapter, ValidationReport, compressPdf, mergePdfs, splitPdf, validateAsset };
@@ -0,0 +1,54 @@
1
+ import { S as StorageAdapter, B as BookConfig, P as PdfOutput, V as ValidationReport } from './index-Dasg2QQ6.js';
2
+ export { A as AssetInput, a as AssetType, b as BookSize, c as PageNumberOptions, d as SplitRange } from './index-Dasg2QQ6.js';
3
+
4
+ declare class PdfForge {
5
+ private storage;
6
+ constructor(storage: StorageAdapter);
7
+ /**
8
+ * Assembles a press-ready PDF book from a list of CMYK image assets.
9
+ * Assets are downloaded from storage, assembled in order (respecting repeatCount),
10
+ * and the result is uploaded back to storage.
11
+ */
12
+ buildBook(config: BookConfig): Promise<PdfOutput>;
13
+ /**
14
+ * Downloads PDFs from storage, merges them into a single PDF,
15
+ * uploads the result to the last path's directory as 'merged.pdf',
16
+ * and returns the output.
17
+ */
18
+ mergePdfs(paths: string[], outputPath: string): Promise<PdfOutput>;
19
+ /**
20
+ * Downloads a PDF from storage, splits it into page ranges,
21
+ * uploads each part, and returns an array of PdfOutputs.
22
+ */
23
+ splitPdf(path: string, ranges: Array<[number, number]>, outputBasePath: string): Promise<PdfOutput[]>;
24
+ /**
25
+ * Downloads a PDF, compresses it, uploads the result, and returns the output.
26
+ */
27
+ compress(path: string, outputPath: string): Promise<PdfOutput>;
28
+ /**
29
+ * Downloads an asset (image or PDF) and validates CMYK color space, DPI, and bleed.
30
+ */
31
+ validate(path: string): Promise<ValidationReport>;
32
+ }
33
+
34
+ declare function validateAsset(buffer: Buffer): Promise<ValidationReport>;
35
+
36
+ declare function mergePdfs(buffers: Buffer[], outputPath: string, storage: StorageAdapter): Promise<PdfOutput>;
37
+
38
+ /**
39
+ * Splits a PDF into parts defined by inclusive [start, end] page ranges (0-indexed).
40
+ * Each part is uploaded to `${outputBasePath}/part-N.pdf`.
41
+ */
42
+ declare function splitPdf(buffer: Buffer, ranges: Array<[number, number]>, outputBasePath: string, storage: StorageAdapter): Promise<PdfOutput[]>;
43
+
44
+ /**
45
+ * Re-saves the PDF with object streams enabled (pdf-lib's built-in compression).
46
+ * For deeper compression, pass pressReady: true to use Ghostscript (requires gs installed).
47
+ *
48
+ * Note: Object stream compression reduces cross-reference table size but does not
49
+ * re-compress already-embedded image data. Ghostscript post-processing is needed
50
+ * for full image resampling/compression.
51
+ */
52
+ declare function compressPdf(buffer: Buffer, outputPath: string, storage: StorageAdapter): Promise<PdfOutput>;
53
+
54
+ export { BookConfig, PdfForge, PdfOutput, StorageAdapter, ValidationReport, compressPdf, mergePdfs, splitPdf, validateAsset };
package/dist/index.js ADDED
@@ -0,0 +1,304 @@
1
+ "use strict";
2
+ var __create = Object.create;
3
+ var __defProp = Object.defineProperty;
4
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
5
+ var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
7
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
8
+ var __export = (target, all) => {
9
+ for (var name in all)
10
+ __defProp(target, name, { get: all[name], enumerable: true });
11
+ };
12
+ var __copyProps = (to, from, except, desc) => {
13
+ if (from && typeof from === "object" || typeof from === "function") {
14
+ for (let key of __getOwnPropNames(from))
15
+ if (!__hasOwnProp.call(to, key) && key !== except)
16
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
17
+ }
18
+ return to;
19
+ };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
28
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
+
30
+ // src/index.ts
31
+ var index_exports = {};
32
+ __export(index_exports, {
33
+ PdfForge: () => PdfForge,
34
+ compressPdf: () => compressPdf,
35
+ mergePdfs: () => mergePdfs,
36
+ splitPdf: () => splitPdf,
37
+ validateAsset: () => validateAsset
38
+ });
39
+ module.exports = __toCommonJS(index_exports);
40
+
41
+ // src/assembler/buildBook.ts
42
+ var import_pdf_lib2 = require("pdf-lib");
43
+
44
+ // src/assembler/pageLayout.ts
45
+ var BLEED_PT = 9;
46
+ var TRIM_SIZES = {
47
+ "6x9": { width: 432, height: 648 },
48
+ "7x9": { width: 504, height: 648 },
49
+ "5x8": { width: 360, height: 576 },
50
+ "8.5x11": { width: 612, height: 792 }
51
+ };
52
+ function getPageDimensions(size, bleed) {
53
+ const trim = TRIM_SIZES[size];
54
+ const extra = bleed ? BLEED_PT * 2 : 0;
55
+ return {
56
+ width: trim.width + extra,
57
+ height: trim.height + extra,
58
+ bleedPt: bleed ? BLEED_PT : 0
59
+ };
60
+ }
61
+
62
+ // src/cmyk/embedCmyk.ts
63
+ var import_pdf_lib = require("pdf-lib");
64
+ var import_sharp = __toESM(require("sharp"));
65
+ async function embedImage(pdfDoc, page, buffer, pageWidth, pageHeight) {
66
+ const meta = await (0, import_sharp.default)(buffer).metadata();
67
+ const width = meta.width;
68
+ const height = meta.height;
69
+ let imageBytes;
70
+ let colorSpace;
71
+ if (meta.format === "jpeg") {
72
+ imageBytes = buffer;
73
+ if (meta.channels === 4) colorSpace = "DeviceCMYK";
74
+ else if (meta.channels === 1) colorSpace = "DeviceGray";
75
+ else colorSpace = "DeviceRGB";
76
+ } else {
77
+ imageBytes = await (0, import_sharp.default)(buffer).jpeg({ quality: 90 }).toBuffer();
78
+ colorSpace = "DeviceRGB";
79
+ }
80
+ const context = pdfDoc.context;
81
+ const imageStream = context.stream(imageBytes, {
82
+ Type: "XObject",
83
+ Subtype: "Image",
84
+ Width: width,
85
+ Height: height,
86
+ ColorSpace: colorSpace,
87
+ BitsPerComponent: 8,
88
+ Filter: "DCTDecode",
89
+ Length: imageBytes.length
90
+ });
91
+ const imageRef = context.register(imageStream);
92
+ const imageName = import_pdf_lib.PDFName.of(`Img${Date.now()}`);
93
+ const pageNode = page.node;
94
+ let resources = pageNode.get(import_pdf_lib.PDFName.of("Resources"));
95
+ if (!resources) {
96
+ pageNode.set(import_pdf_lib.PDFName.of("Resources"), context.obj({}));
97
+ resources = pageNode.get(import_pdf_lib.PDFName.of("Resources"));
98
+ }
99
+ let xObject = resources.get(import_pdf_lib.PDFName.of("XObject"));
100
+ if (!xObject) {
101
+ resources.set(import_pdf_lib.PDFName.of("XObject"), context.obj({}));
102
+ xObject = resources.get(import_pdf_lib.PDFName.of("XObject"));
103
+ }
104
+ xObject.set(imageName, imageRef);
105
+ const saveState = import_pdf_lib.PDFOperator.of("q");
106
+ const transform = import_pdf_lib.PDFOperator.of("cm", [
107
+ import_pdf_lib.PDFNumber.of(pageWidth),
108
+ import_pdf_lib.PDFNumber.of(0),
109
+ import_pdf_lib.PDFNumber.of(0),
110
+ import_pdf_lib.PDFNumber.of(pageHeight),
111
+ import_pdf_lib.PDFNumber.of(0),
112
+ import_pdf_lib.PDFNumber.of(0)
113
+ ]);
114
+ const drawImage = import_pdf_lib.PDFOperator.of("Do", [imageName]);
115
+ const restoreState = import_pdf_lib.PDFOperator.of("Q");
116
+ page.pushOperators(saveState, transform, drawImage, restoreState);
117
+ }
118
+
119
+ // src/assembler/buildBook.ts
120
+ function expandAssets(assets) {
121
+ const expanded = [];
122
+ for (const asset of assets) {
123
+ const count = asset.repeatCount ?? 1;
124
+ for (let i = 0; i < count; i++) {
125
+ expanded.push({ ...asset, repeatCount: 1 });
126
+ }
127
+ }
128
+ return expanded;
129
+ }
130
+ async function buildBook(config, storage) {
131
+ const pdfDoc = await import_pdf_lib2.PDFDocument.create();
132
+ const { width, height } = getPageDimensions(config.size, config.bleed);
133
+ const pages = expandAssets(config.assets);
134
+ for (const asset of pages) {
135
+ const buffer = await storage.download(asset.path);
136
+ const page = pdfDoc.addPage([width, height]);
137
+ await embedImage(pdfDoc, page, buffer, width, height);
138
+ }
139
+ const pdfBytes = await pdfDoc.save();
140
+ const pdfBuffer = Buffer.from(pdfBytes);
141
+ const url = await storage.upload(pdfBuffer, config.outputPath);
142
+ return {
143
+ buffer: pdfBuffer,
144
+ url,
145
+ pageCount: pdfDoc.getPageCount()
146
+ };
147
+ }
148
+
149
+ // src/validator/validateAsset.ts
150
+ var import_sharp2 = __toESM(require("sharp"));
151
+ var MIN_DPI = 300;
152
+ function mapColorSpace(space) {
153
+ if (!space) return "unknown";
154
+ if (space === "cmyk") return "CMYK";
155
+ if (space === "srgb" || space === "rgb") return "RGB";
156
+ return "unknown";
157
+ }
158
+ async function validateAsset(buffer) {
159
+ const meta = await (0, import_sharp2.default)(buffer).metadata();
160
+ const warnings = [];
161
+ const colorSpace = mapColorSpace(meta.space);
162
+ const dpi = meta.density ?? 0;
163
+ const width = meta.width ?? 0;
164
+ const height = meta.height ?? 0;
165
+ if (colorSpace !== "CMYK") {
166
+ warnings.push(`Color space is ${colorSpace}, expected CMYK. This file may not print correctly.`);
167
+ }
168
+ if (dpi < MIN_DPI) {
169
+ warnings.push(`DPI is ${dpi}, minimum recommended is ${MIN_DPI} for print.`);
170
+ }
171
+ const hasBleed = width > 0 && height > 0 ? checkHasBleed(width, height, dpi) : false;
172
+ if (!hasBleed) {
173
+ warnings.push("Image does not appear to include a bleed margin (0.125 inch). Consider adding bleed for press-ready output.");
174
+ }
175
+ return {
176
+ valid: warnings.length === 0,
177
+ colorSpace,
178
+ dpi,
179
+ hasBleed,
180
+ warnings
181
+ };
182
+ }
183
+ function checkHasBleed(width, height, dpi) {
184
+ if (dpi === 0) return false;
185
+ const bleedPx = Math.round(0.125 / 1 * dpi);
186
+ const trimSizes = [
187
+ { w: 6, h: 9 },
188
+ { w: 5, h: 8 },
189
+ { w: 8.5, h: 11 }
190
+ ];
191
+ for (const trim of trimSizes) {
192
+ const trimW = Math.round(trim.w * dpi);
193
+ const trimH = Math.round(trim.h * dpi);
194
+ const expectedW = trimW + bleedPx * 2;
195
+ const expectedH = trimH + bleedPx * 2;
196
+ const withinW = Math.abs(width - expectedW) <= expectedW * 0.05;
197
+ const withinH = Math.abs(height - expectedH) <= expectedH * 0.05;
198
+ if (withinW && withinH) return true;
199
+ }
200
+ return false;
201
+ }
202
+
203
+ // src/transformer/mergePdfs.ts
204
+ var import_pdf_lib3 = require("pdf-lib");
205
+ async function mergePdfs(buffers, outputPath, storage) {
206
+ const merged = await import_pdf_lib3.PDFDocument.create();
207
+ for (const buf of buffers) {
208
+ const src = await import_pdf_lib3.PDFDocument.load(buf);
209
+ const pageIndices = src.getPageIndices();
210
+ const copiedPages = await merged.copyPages(src, pageIndices);
211
+ copiedPages.forEach((p) => merged.addPage(p));
212
+ }
213
+ const pdfBytes = await merged.save();
214
+ const pdfBuffer = Buffer.from(pdfBytes);
215
+ const url = await storage.upload(pdfBuffer, outputPath);
216
+ return { buffer: pdfBuffer, url, pageCount: merged.getPageCount() };
217
+ }
218
+
219
+ // src/transformer/splitPdf.ts
220
+ var import_pdf_lib4 = require("pdf-lib");
221
+ async function splitPdf(buffer, ranges, outputBasePath, storage) {
222
+ const src = await import_pdf_lib4.PDFDocument.load(buffer);
223
+ const results = [];
224
+ for (let i = 0; i < ranges.length; i++) {
225
+ const [start, end] = ranges[i];
226
+ const part = await import_pdf_lib4.PDFDocument.create();
227
+ const pageIndices = Array.from(
228
+ { length: end - start + 1 },
229
+ (_, idx) => start + idx
230
+ );
231
+ const copiedPages = await part.copyPages(src, pageIndices);
232
+ copiedPages.forEach((p) => part.addPage(p));
233
+ const pdfBytes = await part.save();
234
+ const pdfBuffer = Buffer.from(pdfBytes);
235
+ const outputPath = `${outputBasePath}/part-${i + 1}.pdf`;
236
+ const url = await storage.upload(pdfBuffer, outputPath);
237
+ results.push({ buffer: pdfBuffer, url, pageCount: part.getPageCount() });
238
+ }
239
+ return results;
240
+ }
241
+
242
+ // src/transformer/compressPdf.ts
243
+ var import_pdf_lib5 = require("pdf-lib");
244
+ async function compressPdf(buffer, outputPath, storage) {
245
+ const pdfDoc = await import_pdf_lib5.PDFDocument.load(buffer);
246
+ const pdfBytes = await pdfDoc.save({ useObjectStreams: true });
247
+ const pdfBuffer = Buffer.from(pdfBytes);
248
+ const url = await storage.upload(pdfBuffer, outputPath);
249
+ return { buffer: pdfBuffer, url, pageCount: pdfDoc.getPageCount() };
250
+ }
251
+
252
+ // src/assembler/PdfForge.ts
253
+ var PdfForge = class {
254
+ constructor(storage) {
255
+ this.storage = storage;
256
+ }
257
+ /**
258
+ * Assembles a press-ready PDF book from a list of CMYK image assets.
259
+ * Assets are downloaded from storage, assembled in order (respecting repeatCount),
260
+ * and the result is uploaded back to storage.
261
+ */
262
+ async buildBook(config) {
263
+ return buildBook(config, this.storage);
264
+ }
265
+ /**
266
+ * Downloads PDFs from storage, merges them into a single PDF,
267
+ * uploads the result to the last path's directory as 'merged.pdf',
268
+ * and returns the output.
269
+ */
270
+ async mergePdfs(paths, outputPath) {
271
+ const buffers = await Promise.all(paths.map((p) => this.storage.download(p)));
272
+ return mergePdfs(buffers, outputPath, this.storage);
273
+ }
274
+ /**
275
+ * Downloads a PDF from storage, splits it into page ranges,
276
+ * uploads each part, and returns an array of PdfOutputs.
277
+ */
278
+ async splitPdf(path, ranges, outputBasePath) {
279
+ const buffer = await this.storage.download(path);
280
+ return splitPdf(buffer, ranges, outputBasePath, this.storage);
281
+ }
282
+ /**
283
+ * Downloads a PDF, compresses it, uploads the result, and returns the output.
284
+ */
285
+ async compress(path, outputPath) {
286
+ const buffer = await this.storage.download(path);
287
+ return compressPdf(buffer, outputPath, this.storage);
288
+ }
289
+ /**
290
+ * Downloads an asset (image or PDF) and validates CMYK color space, DPI, and bleed.
291
+ */
292
+ async validate(path) {
293
+ const buffer = await this.storage.download(path);
294
+ return validateAsset(buffer);
295
+ }
296
+ };
297
+ // Annotate the CommonJS export names for ESM import in node:
298
+ 0 && (module.exports = {
299
+ PdfForge,
300
+ compressPdf,
301
+ mergePdfs,
302
+ splitPdf,
303
+ validateAsset
304
+ });
package/dist/index.mjs ADDED
@@ -0,0 +1,265 @@
1
+ import "./chunk-Y6FXYEAI.mjs";
2
+
3
+ // src/assembler/buildBook.ts
4
+ import { PDFDocument as PDFDocument2 } from "pdf-lib";
5
+
6
+ // src/assembler/pageLayout.ts
7
// Trim-to-page math for the supported book sizes.
// All values are PDF points (1/72 inch); bleed is 9pt (0.125in) per edge.
var BLEED_PT = 9;
var TRIM_SIZES = {
  "6x9": { width: 432, height: 648 },
  "7x9": { width: 504, height: 648 },
  "5x8": { width: 360, height: 576 },
  "8.5x11": { width: 612, height: 792 }
};
/**
 * Returns the physical page size for a trim size, optionally grown by a
 * bleed margin on every edge, plus the bleed amount actually applied.
 */
function getPageDimensions(size, bleed) {
  const { width: trimWidth, height: trimHeight } = TRIM_SIZES[size];
  const margin = bleed ? BLEED_PT : 0;
  return {
    width: trimWidth + margin * 2,
    height: trimHeight + margin * 2,
    bleedPt: margin
  };
}
23
+
24
+ // src/cmyk/embedCmyk.ts
25
+ import { PDFName, PDFNumber, PDFOperator } from "pdf-lib";
26
+ import sharp from "sharp";
27
// Monotonic sequence for XObject names. The previous implementation used
// Date.now(), which can repeat within one millisecond; two embeds landing in
// the same page's resource dictionary would then share a name and the second
// xObject.set() would silently overwrite the first image.
var embedImageSeq = 0;
/**
 * Embeds a raster image into `page`, stretched to cover the full page
 * (pageWidth x pageHeight points, anchored at the origin).
 *
 * JPEG input is embedded as-is (DCTDecode) with a device color space chosen
 * from the channel count (4 -> CMYK, 1 -> Gray, else RGB). Any other format
 * is re-encoded to RGB JPEG at quality 90 — note this drops CMYK data for
 * non-JPEG sources.
 */
async function embedImage(pdfDoc, page, buffer, pageWidth, pageHeight) {
  const meta = await sharp(buffer).metadata();
  // NOTE(review): sharp may report width/height as undefined for exotic
  // inputs; assumes upstream validation rejected those — TODO confirm.
  const width = meta.width;
  const height = meta.height;
  let imageBytes;
  let colorSpace;
  if (meta.format === "jpeg") {
    // Keep the original JPEG bytes; map channel count to a PDF color space.
    imageBytes = buffer;
    if (meta.channels === 4) colorSpace = "DeviceCMYK";
    else if (meta.channels === 1) colorSpace = "DeviceGray";
    else colorSpace = "DeviceRGB";
  } else {
    // Transcode everything else to JPEG so one DCTDecode path suffices.
    imageBytes = await sharp(buffer).jpeg({ quality: 90 }).toBuffer();
    colorSpace = "DeviceRGB";
  }
  const context = pdfDoc.context;
  const imageStream = context.stream(imageBytes, {
    Type: "XObject",
    Subtype: "Image",
    Width: width,
    Height: height,
    ColorSpace: colorSpace,
    BitsPerComponent: 8,
    Filter: "DCTDecode",
    Length: imageBytes.length
  });
  const imageRef = context.register(imageStream);
  // Unique for the process lifetime; avoids the Date.now() collision above.
  const imageName = PDFName.of(`Img${embedImageSeq++}`);
  const pageNode = page.node;
  // Ensure the page has Resources and Resources/XObject dictionaries.
  let resources = pageNode.get(PDFName.of("Resources"));
  if (!resources) {
    pageNode.set(PDFName.of("Resources"), context.obj({}));
    resources = pageNode.get(PDFName.of("Resources"));
  }
  let xObject = resources.get(PDFName.of("XObject"));
  if (!xObject) {
    resources.set(PDFName.of("XObject"), context.obj({}));
    xObject = resources.get(PDFName.of("XObject"));
  }
  xObject.set(imageName, imageRef);
  // q / cm / Do / Q: scale the unit image square to the full page, draw it,
  // then restore graphics state.
  const saveState = PDFOperator.of("q");
  const transform = PDFOperator.of("cm", [
    PDFNumber.of(pageWidth),
    PDFNumber.of(0),
    PDFNumber.of(0),
    PDFNumber.of(pageHeight),
    PDFNumber.of(0),
    PDFNumber.of(0)
  ]);
  const drawImage = PDFOperator.of("Do", [imageName]);
  const restoreState = PDFOperator.of("Q");
  page.pushOperators(saveState, transform, drawImage, restoreState);
}
80
+
81
+ // src/assembler/buildBook.ts
82
// Flattens repeatCount: an asset with repeatCount N (default 1) becomes
// N shallow copies, each normalized to repeatCount 1.
function expandAssets(assets) {
  return assets.flatMap((asset) => {
    const copies = asset.repeatCount ?? 1;
    return Array.from({ length: copies }, () => ({ ...asset, repeatCount: 1 }));
  });
}
92
/**
 * Assembles a press-ready PDF: one page per (repeatCount-expanded) asset,
 * each filled edge-to-edge with its downloaded image. The finished document
 * is uploaded to config.outputPath and returned with its URL and page count.
 */
async function buildBook(config, storage) {
  const { width, height } = getPageDimensions(config.size, config.bleed);
  const doc = await PDFDocument2.create();
  // Downloads are awaited sequentially so pages are added in asset order.
  for (const asset of expandAssets(config.assets)) {
    const imageBuffer = await storage.download(asset.path);
    const page = doc.addPage([width, height]);
    await embedImage(doc, page, imageBuffer, width, height);
  }
  const outBuffer = Buffer.from(await doc.save());
  const url = await storage.upload(outBuffer, config.outputPath);
  return { buffer: outBuffer, url, pageCount: doc.getPageCount() };
}
110
+
111
+ // src/validator/validateAsset.ts
112
+ import sharp2 from "sharp";
113
var MIN_DPI = 300; // minimum recommended print resolution (dots per inch)
// Maps sharp's `space` metadata value onto the validator's coarse labels.
function mapColorSpace(space) {
  switch (space) {
    case "cmyk":
      return "CMYK";
    case "srgb":
    case "rgb":
      return "RGB";
    default:
      // Covers undefined/null and any space the validator does not model.
      return "unknown";
  }
}
120
/**
 * Inspects an image buffer (via sharp metadata) for print readiness:
 * CMYK color space, at least MIN_DPI density, and an apparent 0.125in
 * bleed margin. `valid` is true only when no warnings were produced.
 */
async function validateAsset(buffer) {
  const meta = await sharp2(buffer).metadata();
  const colorSpace = mapColorSpace(meta.space);
  const dpi = meta.density ?? 0;
  const width = meta.width ?? 0;
  const height = meta.height ?? 0;
  // Bleed can only be estimated when real pixel dimensions are known.
  const hasBleed = width > 0 && height > 0 && checkHasBleed(width, height, dpi);
  const warnings = [];
  if (colorSpace !== "CMYK") {
    warnings.push(`Color space is ${colorSpace}, expected CMYK. This file may not print correctly.`);
  }
  if (dpi < MIN_DPI) {
    warnings.push(`DPI is ${dpi}, minimum recommended is ${MIN_DPI} for print.`);
  }
  if (!hasBleed) {
    warnings.push("Image does not appear to include a bleed margin (0.125 inch). Consider adding bleed for press-ready output.");
  }
  return { valid: warnings.length === 0, colorSpace, dpi, hasBleed, warnings };
}
145
/**
 * Heuristically decides whether pixel dimensions include a 0.125in bleed
 * margin around one of the supported trim sizes at the given DPI.
 * Fixes vs. previous version: 7x9 was missing from the candidate trims
 * (valid 7x9 bleed assets were reported bleed-less), and the 5% dimension
 * tolerance exceeded the bleed itself (~4% of a 6x9 page), so a bleed-less
 * trim-size image falsely passed. Tolerance is now half a bleed, which
 * still absorbs rounding while rejecting trim-only dimensions.
 */
function checkHasBleed(width, height, dpi) {
  if (dpi === 0) return false; // unknown density: cannot relate pixels to inches
  const bleedPx = Math.round(0.125 * dpi);
  // Keep in sync with TRIM_SIZES in pageLayout: 6x9, 7x9, 5x8, 8.5x11.
  const trimSizes = [
    { w: 6, h: 9 },
    { w: 7, h: 9 },
    { w: 5, h: 8 },
    { w: 8.5, h: 11 }
  ];
  const tolerance = Math.max(1, Math.floor(bleedPx / 2));
  for (const trim of trimSizes) {
    const expectedW = Math.round(trim.w * dpi) + bleedPx * 2;
    const expectedH = Math.round(trim.h * dpi) + bleedPx * 2;
    if (Math.abs(width - expectedW) <= tolerance && Math.abs(height - expectedH) <= tolerance) {
      return true;
    }
  }
  return false;
}
164
+
165
+ // src/transformer/mergePdfs.ts
166
+ import { PDFDocument as PDFDocument3 } from "pdf-lib";
167
/**
 * Merges the given PDF buffers into one document (pages kept in input
 * order), uploads the result to `outputPath`, and returns the merged
 * buffer, its URL, and the total page count.
 */
async function mergePdfs(buffers, outputPath, storage) {
  const merged = await PDFDocument3.create();
  for (const sourceBytes of buffers) {
    const source = await PDFDocument3.load(sourceBytes);
    const copied = await merged.copyPages(source, source.getPageIndices());
    for (const page of copied) merged.addPage(page);
  }
  const outBuffer = Buffer.from(await merged.save());
  const url = await storage.upload(outBuffer, outputPath);
  return { buffer: outBuffer, url, pageCount: merged.getPageCount() };
}
180
+
181
+ // src/transformer/splitPdf.ts
182
+ import { PDFDocument as PDFDocument4 } from "pdf-lib";
183
/**
 * Splits a PDF into parts, one per inclusive [start, end] index range.
 * Each part is uploaded as `${outputBasePath}/part-<n>.pdf` (n starting
 * at 1) and an output record is returned per range, in range order.
 */
async function splitPdf(buffer, ranges, outputBasePath, storage) {
  const source = await PDFDocument4.load(buffer);
  const outputs = [];
  let partNumber = 0;
  for (const [start, end] of ranges) {
    partNumber += 1;
    const part = await PDFDocument4.create();
    const indices = [];
    for (let pageIdx = start; pageIdx <= end; pageIdx++) indices.push(pageIdx);
    const copied = await part.copyPages(source, indices);
    for (const page of copied) part.addPage(page);
    const partBuffer = Buffer.from(await part.save());
    const url = await storage.upload(partBuffer, `${outputBasePath}/part-${partNumber}.pdf`);
    outputs.push({ buffer: partBuffer, url, pageCount: part.getPageCount() });
  }
  return outputs;
}
203
+
204
+ // src/transformer/compressPdf.ts
205
+ import { PDFDocument as PDFDocument5 } from "pdf-lib";
206
/**
 * Re-saves a PDF with object streams enabled (pdf-lib's built-in size
 * reduction), uploads the result to `outputPath`, and returns the output.
 */
async function compressPdf(buffer, outputPath, storage) {
  const doc = await PDFDocument5.load(buffer);
  const compressed = Buffer.from(await doc.save({ useObjectStreams: true }));
  const url = await storage.upload(compressed, outputPath);
  return { buffer: compressed, url, pageCount: doc.getPageCount() };
}
213
+
214
// src/assembler/PdfForge.ts
/**
 * High-level facade over the standalone PDF functions, bound to a storage
 * adapter. Every method follows the same pattern: download inputs through
 * the adapter, delegate to the matching pure function, upload the result.
 */
var PdfForge = class {
  constructor(storage) {
    // StorageAdapter providing download(path) and upload(buffer, path).
    this.storage = storage;
  }
  /**
   * Assembles a press-ready PDF book from a list of CMYK image assets.
   * Assets are downloaded from storage, assembled in order (respecting
   * repeatCount), and the result is uploaded back to storage.
   */
  async buildBook(config) {
    return buildBook(config, this.storage);
  }
  /**
   * Downloads each PDF at `paths`, merges them into one document in the
   * given order, and uploads the merged PDF to `outputPath`.
   */
  async mergePdfs(paths, outputPath) {
    const downloads = paths.map((sourcePath) => this.storage.download(sourcePath));
    return mergePdfs(await Promise.all(downloads), outputPath, this.storage);
  }
  /**
   * Downloads the PDF at `path`, splits it into the given inclusive page
   * ranges, uploads each part under `outputBasePath`, and returns one
   * output per range.
   */
  async splitPdf(path, ranges, outputBasePath) {
    return splitPdf(await this.storage.download(path), ranges, outputBasePath, this.storage);
  }
  /**
   * Downloads a PDF, compresses it, uploads the result, and returns the output.
   */
  async compress(path, outputPath) {
    return compressPdf(await this.storage.download(path), outputPath, this.storage);
  }
  /**
   * Downloads an asset and reports CMYK color space, DPI, and bleed findings.
   */
  async validate(path) {
    return validateAsset(await this.storage.download(path));
  }
};
259
+ export {
260
+ PdfForge,
261
+ compressPdf,
262
+ mergePdfs,
263
+ splitPdf,
264
+ validateAsset
265
+ };
package/package.json ADDED
@@ -0,0 +1,57 @@
1
+ {
2
+ "name": "gwan-pdf-engine",
3
+ "version": "0.1.0",
4
+ "private": false,
5
+ "publishConfig": {
6
+ "access": "public"
7
+ },
8
+ "description": "Press-ready CMYK PDF assembly library for planners, journals, and print products",
9
+ "main": "./dist/index.cjs",
10
+ "module": "./dist/index.js",
11
+ "types": "./dist/index.d.ts",
12
+ "exports": {
13
+ ".": {
14
+ "import": "./dist/index.js",
15
+ "require": "./dist/index.cjs"
16
+ },
17
+ "./adapters/gcp": {
18
+ "import": "./dist/adapters/storage/gcp.js",
19
+ "require": "./dist/adapters/storage/gcp.cjs"
20
+ },
21
+ "./adapters/local": {
22
+ "import": "./dist/adapters/storage/local.js",
23
+ "require": "./dist/adapters/storage/local.cjs"
24
+ }
25
+ },
26
+ "files": ["dist"],
27
+ "scripts": {
28
+ "build": "tsup src/index.ts src/adapters/storage/gcp.ts src/adapters/storage/local.ts --format cjs,esm --dts",
29
+ "test": "vitest run",
30
+ "test:watch": "vitest",
31
+ "dev": "tsup --watch"
32
+ },
33
+ "keywords": ["pdf", "cmyk", "print", "planner", "journal"],
34
+ "author": "",
35
+ "license": "MIT",
36
+ "dependencies": {
37
+ "pdf-lib": "^1.17.1",
38
+ "sharp": "^0.34.5"
39
+ },
40
+ "optionalDependencies": {
41
+ "@google-cloud/storage": "^5.18.3"
42
+ },
43
+ "peerDependencies": {
44
+ "@google-cloud/storage": ">=5.18.3"
45
+ },
46
+ "peerDependenciesMeta": {
47
+ "@google-cloud/storage": {
48
+ "optional": true
49
+ }
50
+ },
51
+ "devDependencies": {
52
+ "@types/node": "^25.5.2",
53
+ "tsup": "^8.5.1",
54
+ "typescript": "^6.0.2",
55
+ "vitest": "^3.2.4"
56
+ }
57
+ }