cross-image 0.2.1 → 0.2.2

This diff shows the changes between two publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
@@ -135,18 +135,49 @@ class TIFFFormat {
 const { width, height, data, metadata } = imageData;
 const opts = options;
 const compression = opts?.compression ?? "none";
+const grayscale = opts?.grayscale ?? false;
+const rgb = opts?.rgb ?? false;
+// Convert RGBA to grayscale if requested
+let sourceData;
+let samplesPerPixel;
+if (grayscale) {
+    sourceData = new Uint8Array(width * height);
+    samplesPerPixel = 1;
+    for (let i = 0; i < width * height; i++) {
+        const r = data[i * 4];
+        const g = data[i * 4 + 1];
+        const b = data[i * 4 + 2];
+        // Use standard luminance formula
+        sourceData[i] = Math.round(0.299 * r + 0.587 * g + 0.114 * b);
+    }
+}
+else if (rgb) {
+    // Convert RGBA to RGB (strip alpha channel)
+    sourceData = new Uint8Array(width * height * 3);
+    samplesPerPixel = 3;
+    for (let i = 0; i < width * height; i++) {
+        sourceData[i * 3] = data[i * 4]; // R
+        sourceData[i * 3 + 1] = data[i * 4 + 1]; // G
+        sourceData[i * 3 + 2] = data[i * 4 + 2]; // B
+    }
+}
+else {
+    // Keep as RGBA
+    sourceData = data;
+    samplesPerPixel = 4;
+}
 // Prepare pixel data (compress if needed)
 let pixelData;
 let compressionCode;
 if (compression === "lzw") {
     // LZW compress the pixel data
     const encoder = new tiff_lzw_js_1.TIFFLZWEncoder();
-    pixelData = encoder.compress(data);
+    pixelData = encoder.compress(sourceData);
     compressionCode = 5;
 }
 else {
     // Uncompressed
-    pixelData = data;
+    pixelData = sourceData;
     compressionCode = 1;
 }
 const result = [];
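
For reference, the 0.299 / 0.587 / 0.114 weights in the conversion above are the ITU-R BT.601 luma coefficients; a pure-green RGBA pixel (0, 255, 0, 255), for example, becomes the single gray sample round(0.587 * 255) = 150.
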
@@ -164,7 +195,10 @@ class TIFFFormat {
 // IFD (Image File Directory)
 const ifdStart = result.length;
 // Count number of entries (including metadata)
-let numEntries = 12; // Base entries (including ExtraSamples)
+// Grayscale: 10 entries (no ExtraSamples)
+// RGB: 11 entries (no ExtraSamples)
+// RGBA: 12 entries (includes ExtraSamples)
+let numEntries = grayscale ? 10 : (rgb ? 11 : 12);
 if (metadata?.description)
     numEntries++;
 if (metadata?.author)
@@ -182,16 +216,28 @@ class TIFFFormat {
 // ImageHeight (0x0101)
 this.writeIFDEntry(result, 0x0101, 4, 1, height);
 // BitsPerSample (0x0102) - 8 bits per channel
-this.writeIFDEntry(result, 0x0102, 3, 4, dataOffset);
-dataOffset += 8; // 4 x 2-byte values
+if (grayscale) {
+    // Single value for grayscale
+    this.writeIFDEntry(result, 0x0102, 3, 1, 8);
+}
+else if (rgb) {
+    // 3 values for RGB
+    this.writeIFDEntry(result, 0x0102, 3, 3, dataOffset);
+    dataOffset += 6; // 3 x 2-byte values
+}
+else {
+    // 4 values for RGBA
+    this.writeIFDEntry(result, 0x0102, 3, 4, dataOffset);
+    dataOffset += 8; // 4 x 2-byte values
+}
 // Compression (0x0103) - 1 = uncompressed, 5 = LZW
 this.writeIFDEntry(result, 0x0103, 3, 1, compressionCode);
-// PhotometricInterpretation (0x0106) - 2 = RGB
-this.writeIFDEntry(result, 0x0106, 3, 1, 2);
+// PhotometricInterpretation (0x0106) - 1 = BlackIsZero (grayscale), 2 = RGB
+this.writeIFDEntry(result, 0x0106, 3, 1, grayscale ? 1 : 2);
 // StripOffsets (0x0111)
 this.writeIFDEntry(result, 0x0111, 4, 1, 8);
-// SamplesPerPixel (0x0115) - 4 (RGBA)
-this.writeIFDEntry(result, 0x0115, 3, 1, 4);
+// SamplesPerPixel (0x0115) - 1 for grayscale, 3 for RGB, 4 for RGBA
+this.writeIFDEntry(result, 0x0115, 3, 1, samplesPerPixel);
 // RowsPerStrip (0x0116)
 this.writeIFDEntry(result, 0x0116, 4, 1, height);
 // StripByteCounts (0x0117)
@@ -204,8 +250,10 @@ class TIFFFormat {
 const yResOffset = dataOffset;
 this.writeIFDEntry(result, 0x011b, 5, 1, yResOffset);
 dataOffset += 8;
-// ExtraSamples (0x0152) - 2 = unassociated alpha
-this.writeIFDEntry(result, 0x0152, 3, 1, 2);
+// ExtraSamples (0x0152) - 2 = unassociated alpha (only for RGBA)
+if (!grayscale && !rgb) {
+    this.writeIFDEntry(result, 0x0152, 3, 1, 2);
+}
 // Optional metadata entries
 if (metadata?.description) {
     const descBytes = new TextEncoder().encode(metadata.description + "\0");
@@ -232,11 +280,18 @@ class TIFFFormat {
 // Next IFD offset (0 = no more IFDs)
 this.writeUint32LE(result, 0);
 // Write variable-length data
-// BitsPerSample values (must be written first to match offset calculation)
-this.writeUint16LE(result, 8);
-this.writeUint16LE(result, 8);
-this.writeUint16LE(result, 8);
-this.writeUint16LE(result, 8);
+// BitsPerSample values (only for RGB and RGBA, not for grayscale)
+if (rgb) {
+    this.writeUint16LE(result, 8);
+    this.writeUint16LE(result, 8);
+    this.writeUint16LE(result, 8);
+}
+else if (!grayscale) {
+    this.writeUint16LE(result, 8);
+    this.writeUint16LE(result, 8);
+    this.writeUint16LE(result, 8);
+    this.writeUint16LE(result, 8);
+}
 // XResolution value (rational)
 const dpiX = metadata?.dpiX ?? DEFAULT_DPI;
 this.writeUint32LE(result, dpiX);
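
Taken together, the encoder hunks above mean the TIFF writer now honors `grayscale` and `rgb` flags next to the existing `compression` option, adjusting BitsPerSample, PhotometricInterpretation, SamplesPerPixel, and ExtraSamples to match. A minimal TypeScript sketch of the options shape; the option names come from this diff, while calling the encoder through `new TIFFFormat().encode(...)` is an assumption about the surrounding API:

// Sketch only: grayscale/rgb/compression option names are from the diff above;
// the encode(imageData, options) call shape is an assumption.
const imageData = {
    width: 2,
    height: 1,
    data: new Uint8Array([255, 0, 0, 255, 0, 255, 0, 255]), // two RGBA pixels
    metadata: {},
};
const tiffGray = new TIFFFormat().encode(imageData, { grayscale: true });    // 1 sample/pixel, PhotometricInterpretation = 1
const tiffRgb = new TIFFFormat().encode(imageData, { rgb: true });           // 3 samples/pixel, alpha dropped
const tiffRgba = new TIFFFormat().encode(imageData, { compression: "lzw" }); // 4 samples/pixel + ExtraSamples, LZW-compressed
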
@@ -699,14 +754,23 @@ class TIFFFormat {
 }
 // Check photometric interpretation
 const photometric = this.getIFDValue(data, ifdOffset, 0x0106, isLittleEndian);
-if (photometric !== 2) {
-    // Only support RGB (photometric = 2)
+if (photometric !== 0 && photometric !== 1 && photometric !== 2) {
+    // Support: 0 = WhiteIsZero, 1 = BlackIsZero, 2 = RGB
     return null;
 }
 // Get samples per pixel
 const samplesPerPixel = this.getIFDValue(data, ifdOffset, 0x0115, isLittleEndian);
-if (!samplesPerPixel || (samplesPerPixel !== 3 && samplesPerPixel !== 4)) {
-    // Only support RGB (3) or RGBA (4)
+// For grayscale (photometric 0 or 1), expect 1 sample per pixel
+// For RGB, expect 3 or 4 samples per pixel
+if (!samplesPerPixel) {
+    return null;
+}
+if ((photometric === 0 || photometric === 1) && samplesPerPixel !== 1) {
+    // Grayscale requires 1 sample per pixel
+    return null;
+}
+if (photometric === 2 && samplesPerPixel !== 3 && samplesPerPixel !== 4) {
+    // RGB requires 3 or 4 samples per pixel
     return null;
 }
 // Get strip offset
@@ -736,17 +800,40 @@ class TIFFFormat {
 // Convert to RGBA
 const rgba = new Uint8Array(width * height * 4);
 let srcPos = 0;
-for (let y = 0; y < height; y++) {
-    for (let x = 0; x < width; x++) {
-        const dstIdx = (y * width + x) * 4;
-        if (srcPos + samplesPerPixel > pixelData.length) {
-            return null; // Not enough data
+if (photometric === 0 || photometric === 1) {
+    // Grayscale image
+    for (let y = 0; y < height; y++) {
+        for (let x = 0; x < width; x++) {
+            const dstIdx = (y * width + x) * 4;
+            if (srcPos >= pixelData.length) {
+                return null; // Not enough data
+            }
+            let gray = pixelData[srcPos++];
+            // WhiteIsZero (0) means 0=white, 255=black, so invert
+            if (photometric === 0) {
+                gray = 255 - gray;
+            }
+            rgba[dstIdx] = gray; // R
+            rgba[dstIdx + 1] = gray; // G
+            rgba[dstIdx + 2] = gray; // B
+            rgba[dstIdx + 3] = 255; // A (opaque)
+        }
+    }
+}
+else {
+    // RGB/RGBA image
+    for (let y = 0; y < height; y++) {
+        for (let x = 0; x < width; x++) {
+            const dstIdx = (y * width + x) * 4;
+            if (srcPos + samplesPerPixel > pixelData.length) {
+                return null; // Not enough data
+            }
+            // TIFF stores RGB(A) in order
+            rgba[dstIdx] = pixelData[srcPos++]; // R
+            rgba[dstIdx + 1] = pixelData[srcPos++]; // G
+            rgba[dstIdx + 2] = pixelData[srcPos++]; // B
+            rgba[dstIdx + 3] = samplesPerPixel === 4 ? pixelData[srcPos++] : 255; // A
         }
-        // TIFF stores RGB(A) in order
-        rgba[dstIdx] = pixelData[srcPos++]; // R
-        rgba[dstIdx + 1] = pixelData[srcPos++]; // G
-        rgba[dstIdx + 2] = pixelData[srcPos++]; // B
-        rgba[dstIdx + 3] = samplesPerPixel === 4 ? pixelData[srcPos++] : 255; // A
     }
 }
 return rgba;
@@ -764,14 +851,23 @@ class TIFFFormat {
 }
 // Check photometric interpretation
 const photometric = this.getIFDValue(data, ifdOffset, 0x0106, isLittleEndian);
-if (photometric !== 2) {
-    // Only support RGB (photometric = 2)
+if (photometric !== 0 && photometric !== 1 && photometric !== 2) {
+    // Support: 0 = WhiteIsZero, 1 = BlackIsZero, 2 = RGB
     return null;
 }
 // Get samples per pixel
 const samplesPerPixel = this.getIFDValue(data, ifdOffset, 0x0115, isLittleEndian);
-if (!samplesPerPixel || (samplesPerPixel !== 3 && samplesPerPixel !== 4)) {
-    // Only support RGB (3) or RGBA (4)
+// For grayscale (photometric 0 or 1), expect 1 sample per pixel
+// For RGB, expect 3 or 4 samples per pixel
+if (!samplesPerPixel) {
+    return null;
+}
+if ((photometric === 0 || photometric === 1) && samplesPerPixel !== 1) {
+    // Grayscale requires 1 sample per pixel
+    return null;
+}
+if (photometric === 2 && samplesPerPixel !== 3 && samplesPerPixel !== 4) {
+    // RGB requires 3 or 4 samples per pixel
     return null;
 }
 // Get strip offset
@@ -801,17 +897,40 @@ class TIFFFormat {
 // Convert to RGBA
 const rgba = new Uint8Array(width * height * 4);
 let srcPos = 0;
-for (let y = 0; y < height; y++) {
-    for (let x = 0; x < width; x++) {
-        const dstIdx = (y * width + x) * 4;
-        if (srcPos + samplesPerPixel > pixelData.length) {
-            return null; // Not enough data
+if (photometric === 0 || photometric === 1) {
+    // Grayscale image
+    for (let y = 0; y < height; y++) {
+        for (let x = 0; x < width; x++) {
+            const dstIdx = (y * width + x) * 4;
+            if (srcPos >= pixelData.length) {
+                return null; // Not enough data
+            }
+            let gray = pixelData[srcPos++];
+            // WhiteIsZero (0) means 0=white, 255=black, so invert
+            if (photometric === 0) {
+                gray = 255 - gray;
+            }
+            rgba[dstIdx] = gray; // R
+            rgba[dstIdx + 1] = gray; // G
+            rgba[dstIdx + 2] = gray; // B
+            rgba[dstIdx + 3] = 255; // A (opaque)
+        }
+    }
+}
+else {
+    // RGB/RGBA image
+    for (let y = 0; y < height; y++) {
+        for (let x = 0; x < width; x++) {
+            const dstIdx = (y * width + x) * 4;
+            if (srcPos + samplesPerPixel > pixelData.length) {
+                return null; // Not enough data
+            }
+            // TIFF stores RGB(A) in order
+            rgba[dstIdx] = pixelData[srcPos++]; // R
+            rgba[dstIdx + 1] = pixelData[srcPos++]; // G
+            rgba[dstIdx + 2] = pixelData[srcPos++]; // B
+            rgba[dstIdx + 3] = samplesPerPixel === 4 ? pixelData[srcPos++] : 255; // A
         }
-        // TIFF stores RGB(A) in order
-        rgba[dstIdx] = pixelData[srcPos++]; // R
-        rgba[dstIdx + 1] = pixelData[srcPos++]; // G
-        rgba[dstIdx + 2] = pixelData[srcPos++]; // B
-        rgba[dstIdx + 3] = samplesPerPixel === 4 ? pixelData[srcPos++] : 255; // A
     }
 }
 return rgba;
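
Both decode paths above now accept PhotometricInterpretation 0 (WhiteIsZero) and 1 (BlackIsZero) with one sample per pixel, expanding each gray sample into an opaque RGBA pixel and inverting WhiteIsZero data. A standalone restatement of that per-pixel expansion, for illustration only (this helper is not part of the package):

// Illustrative helper mirroring the grayscale branch above; not a cross-image API.
function graySampleToRgba(sample: number, photometric: 0 | 1): [number, number, number, number] {
    const gray = photometric === 0 ? 255 - sample : sample; // WhiteIsZero stores 0 = white
    return [gray, gray, gray, 255];                         // replicate into R/G/B, alpha fully opaque
}
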
@@ -207,6 +207,36 @@ export declare class Image {
      * @returns This image instance for chaining
      */
     grayscale(): this;
+    /**
+     * Apply sepia tone effect to the image
+     * @returns This image instance for chaining
+     */
+    sepia(): this;
+    /**
+     * Apply box blur filter to the image
+     * @param radius Blur radius (default: 1)
+     * @returns This image instance for chaining
+     */
+    blur(radius?: number): this;
+    /**
+     * Apply Gaussian blur filter to the image
+     * @param radius Blur radius (default: 1)
+     * @param sigma Optional standard deviation (if not provided, calculated from radius)
+     * @returns This image instance for chaining
+     */
+    gaussianBlur(radius?: number, sigma?: number): this;
+    /**
+     * Apply sharpen filter to the image
+     * @param amount Sharpening amount (0-1, default: 0.5)
+     * @returns This image instance for chaining
+     */
+    sharpen(amount?: number): this;
+    /**
+     * Apply median filter to reduce noise
+     * @param radius Filter radius (default: 1)
+     * @returns This image instance for chaining
+     */
+    medianFilter(radius?: number): this;
     /**
      * Fill a rectangular region with a color
      * @param x Starting X position
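
These declarations add five chainable filter methods to `Image` (their implementations appear later in this diff). Given an `Image` instance that already has pixel data loaded (how it is obtained is outside this diff), the chaining they enable looks roughly like this sketch:

// Sketch: method names and signatures come from the declarations above;
// `img` is assumed to be an Image with pixel data already loaded.
declare const img: Image;
img.sepia()
    .gaussianBlur(2)     // radius 2, sigma derived from the radius
    .sharpen(0.7)        // amount within the documented 0-1 range
    .medianFilter(1);    // noise reduction with radius 1

Per the compiled implementations below, each method throws "No image loaded" when no pixel data is present and returns `this` for chaining.
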
@@ -14,6 +14,7 @@ const ico_js_1 = require("./formats/ico.js");
 const dng_js_1 = require("./formats/dng.js");
 const pam_js_1 = require("./formats/pam.js");
 const pcx_js_1 = require("./formats/pcx.js");
+const ppm_js_1 = require("./formats/ppm.js");
 const ascii_js_1 = require("./formats/ascii.js");
 const security_js_1 = require("./utils/security.js");
 /**
@@ -461,6 +462,61 @@ class Image {
         this.imageData.data = (0, image_processing_js_1.grayscale)(this.imageData.data);
         return this;
     }
+    /**
+     * Apply sepia tone effect to the image
+     * @returns This image instance for chaining
+     */
+    sepia() {
+        if (!this.imageData)
+            throw new Error("No image loaded");
+        this.imageData.data = (0, image_processing_js_1.sepia)(this.imageData.data);
+        return this;
+    }
+    /**
+     * Apply box blur filter to the image
+     * @param radius Blur radius (default: 1)
+     * @returns This image instance for chaining
+     */
+    blur(radius = 1) {
+        if (!this.imageData)
+            throw new Error("No image loaded");
+        this.imageData.data = (0, image_processing_js_1.boxBlur)(this.imageData.data, this.imageData.width, this.imageData.height, radius);
+        return this;
+    }
+    /**
+     * Apply Gaussian blur filter to the image
+     * @param radius Blur radius (default: 1)
+     * @param sigma Optional standard deviation (if not provided, calculated from radius)
+     * @returns This image instance for chaining
+     */
+    gaussianBlur(radius = 1, sigma) {
+        if (!this.imageData)
+            throw new Error("No image loaded");
+        this.imageData.data = (0, image_processing_js_1.gaussianBlur)(this.imageData.data, this.imageData.width, this.imageData.height, radius, sigma);
+        return this;
+    }
+    /**
+     * Apply sharpen filter to the image
+     * @param amount Sharpening amount (0-1, default: 0.5)
+     * @returns This image instance for chaining
+     */
+    sharpen(amount = 0.5) {
+        if (!this.imageData)
+            throw new Error("No image loaded");
+        this.imageData.data = (0, image_processing_js_1.sharpen)(this.imageData.data, this.imageData.width, this.imageData.height, amount);
+        return this;
+    }
+    /**
+     * Apply median filter to reduce noise
+     * @param radius Filter radius (default: 1)
+     * @returns This image instance for chaining
+     */
+    medianFilter(radius = 1) {
+        if (!this.imageData)
+            throw new Error("No image loaded");
+        this.imageData.data = (0, image_processing_js_1.medianFilter)(this.imageData.data, this.imageData.width, this.imageData.height, radius);
+        return this;
+    }
     /**
      * Fill a rectangular region with a color
      * @param x Starting X position
@@ -567,6 +623,7 @@ Object.defineProperty(Image, "formats", {
         new dng_js_1.DNGFormat(),
         new pam_js_1.PAMFormat(),
         new pcx_js_1.PCXFormat(),
+        new ppm_js_1.PPMFormat(),
         new ascii_js_1.ASCIIFormat(),
     ]
 });
@@ -88,4 +88,47 @@ export declare function crop(data: Uint8Array, width: number, height: number, x:
     width: number;
     height: number;
 };
+/**
+ * Apply a box blur filter to an image
+ * @param data Image data (RGBA)
+ * @param width Image width
+ * @param height Image height
+ * @param radius Blur radius (default: 1)
+ * @returns New image data with box blur applied
+ */
+export declare function boxBlur(data: Uint8Array, width: number, height: number, radius?: number): Uint8Array;
+/**
+ * Apply Gaussian blur to an image
+ * @param data Image data (RGBA)
+ * @param width Image width
+ * @param height Image height
+ * @param radius Blur radius (default: 1)
+ * @param sigma Optional standard deviation (if not provided, calculated from radius)
+ * @returns New image data with Gaussian blur applied
+ */
+export declare function gaussianBlur(data: Uint8Array, width: number, height: number, radius?: number, sigma?: number): Uint8Array;
+/**
+ * Apply sharpen filter to an image
+ * @param data Image data (RGBA)
+ * @param width Image width
+ * @param height Image height
+ * @param amount Sharpening amount (0-1, default: 0.5)
+ * @returns New image data with sharpening applied
+ */
+export declare function sharpen(data: Uint8Array, width: number, height: number, amount?: number): Uint8Array;
+/**
+ * Apply sepia tone effect to an image
+ * @param data Image data (RGBA)
+ * @returns New image data with sepia tone applied
+ */
+export declare function sepia(data: Uint8Array): Uint8Array;
+/**
+ * Apply median filter to reduce noise
+ * @param data Image data (RGBA)
+ * @param width Image width
+ * @param height Image height
+ * @param radius Filter radius (default: 1)
+ * @returns New image data with median filter applied
+ */
+export declare function medianFilter(data: Uint8Array, width: number, height: number, radius?: number): Uint8Array;
 //# sourceMappingURL=image_processing.d.ts.map
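
The same filters are also declared as standalone functions over raw RGBA buffers. A usage sketch; the signatures come from the declarations above, while the import specifier is a guess patterned on the `./utils/security.js` require shown earlier in this diff:

// Sketch: function signatures are from the .d.ts above; the module path is assumed.
import { boxBlur, sepia, medianFilter } from "./utils/image_processing.js";

const width = 4, height = 4;
let pixels = new Uint8Array(width * height * 4).fill(200); // flat gray RGBA buffer
pixels = sepia(pixels);                        // per-pixel, no dimensions needed
pixels = boxBlur(pixels, width, height, 1);    // radius-1 box blur
pixels = medianFilter(pixels, width, height);  // default radius 1

Per the declarations, each function returns a new Uint8Array rather than mutating its input, so results are reassigned above.
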