@huggingface/transformers 3.0.0-alpha.14 → 3.0.0-alpha.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. package/README.md +12 -6
  2. package/dist/ort-wasm-simd-threaded.jsep.wasm +0 -0
  3. package/dist/transformers.cjs +678 -443
  4. package/dist/transformers.cjs.map +1 -1
  5. package/dist/transformers.js +1107 -825
  6. package/dist/transformers.js.map +1 -1
  7. package/dist/transformers.min.cjs +14 -14
  8. package/dist/transformers.min.cjs.map +1 -1
  9. package/dist/transformers.min.js +17 -17
  10. package/dist/transformers.min.js.map +1 -1
  11. package/dist/transformers.min.mjs +52 -52
  12. package/dist/transformers.min.mjs.map +1 -1
  13. package/dist/transformers.mjs +699 -444
  14. package/dist/transformers.mjs.map +1 -1
  15. package/package.json +4 -5
  16. package/src/configs.js +16 -4
  17. package/src/env.js +4 -4
  18. package/src/models.js +151 -58
  19. package/src/pipelines.js +5 -4
  20. package/src/processors.js +313 -285
  21. package/src/tokenizers.js +111 -72
  22. package/src/utils/core.js +12 -0
  23. package/src/utils/data-structures.js +13 -11
  24. package/src/utils/hub.js +1 -1
  25. package/src/utils/maths.js +13 -4
  26. package/types/configs.d.ts +25 -3
  27. package/types/configs.d.ts.map +1 -1
  28. package/types/models.d.ts +63 -2
  29. package/types/models.d.ts.map +1 -1
  30. package/types/pipelines.d.ts.map +1 -1
  31. package/types/processors.d.ts +42 -52
  32. package/types/processors.d.ts.map +1 -1
  33. package/types/tokenizers.d.ts +23 -1
  34. package/types/tokenizers.d.ts.map +1 -1
  35. package/types/utils/core.d.ts +7 -0
  36. package/types/utils/core.d.ts.map +1 -1
  37. package/types/utils/data-structures.d.ts +6 -6
  38. package/types/utils/data-structures.d.ts.map +1 -1
  39. package/types/utils/hub.d.ts +1 -1
  40. package/types/utils/hub.d.ts.map +1 -1
  41. package/types/utils/maths.d.ts.map +1 -1
package/src/processors.js CHANGED
@@ -40,7 +40,7 @@ import {
40
40
  } from './utils/maths.js';
41
41
 
42
42
 
43
- import { Tensor, cat, interpolate, stack, interpolate_4d } from './utils/tensor.js';
43
+ import { Tensor, cat, interpolate, stack, interpolate_4d, full } from './utils/tensor.js';
44
44
 
45
45
  import { RawImage } from './utils/image.js';
46
46
  import {
@@ -73,7 +73,7 @@ function center_to_corners_format([centerX, centerY, width, height]) {
73
73
  * @param {Tensor} outputs.logits The logits
74
74
  * @param {Tensor} outputs.pred_boxes The predicted boxes.
75
75
  * @param {number} [threshold=0.5] The threshold to use for the scores.
76
- * @param {number[][]} [target_sizes=null] The sizes of the original images.
76
+ * @param {[number, number][]} [target_sizes=null] The sizes of the original images.
77
77
  * @param {boolean} [is_zero_shot=false] Whether zero-shot object detection was performed.
78
78
  * @return {Object[]} An array of objects containing the post-processed outputs.
79
79
  * @private
@@ -154,7 +154,7 @@ function post_process_object_detection(outputs, threshold = 0.5, target_sizes =
154
154
  /**
155
155
  * Post-processes the outputs of the model (for semantic segmentation).
156
156
  * @param {*} outputs Raw outputs of the model.
157
- * @param {number[][]} [target_sizes=null] List of tuples corresponding to the requested final size
157
+ * @param {[number, number][]} [target_sizes=null] List of tuples corresponding to the requested final size
158
158
  * (height, width) of each prediction. If unset, predictions will not be resized.
159
159
  * @returns {{segmentation: Tensor; labels: number[]}[]} The semantic segmentation maps.
160
160
  */
@@ -214,6 +214,300 @@ function post_process_semantic_segmentation(outputs, target_sizes = null) {
214
214
  return toReturn;
215
215
  }
216
216
 
217
+
218
+ /**
219
+ * Binarize the given masks using `object_mask_threshold`, it returns the associated values of `masks`, `scores` and `labels`.
220
+ * @param {Tensor} class_logits The class logits.
221
+ * @param {Tensor} mask_logits The mask logits.
222
+ * @param {number} object_mask_threshold A number between 0 and 1 used to binarize the masks.
223
+ * @param {number} num_labels The number of labels.
224
+ * @returns {[Tensor[], number[], number[]]} The binarized masks, the scores, and the labels.
225
+ * @private
226
+ */
227
+ function remove_low_and_no_objects(class_logits, mask_logits, object_mask_threshold, num_labels) {
228
+
229
+ const mask_probs_item = [];
230
+ const pred_scores_item = [];
231
+ const pred_labels_item = [];
232
+
233
+ for (let j = 0; j < class_logits.dims[0]; ++j) {
234
+ const cls = class_logits[j];
235
+ const mask = mask_logits[j];
236
+
237
+ const pred_label = max(cls.data)[1];
238
+ if (pred_label === num_labels) {
239
+ // Is the background, so we ignore it
240
+ continue;
241
+ }
242
+
243
+ const scores = softmax(cls.data);
244
+ const pred_score = scores[pred_label];
245
+ if (pred_score > object_mask_threshold) {
246
+ mask_probs_item.push(mask);
247
+ pred_scores_item.push(pred_score);
248
+ pred_labels_item.push(pred_label);
249
+ }
250
+ }
251
+
252
+ return [mask_probs_item, pred_scores_item, pred_labels_item];
253
+ }
254
+
255
+ /**
256
+ * Checks whether the segment is valid or not.
257
+ * @param {Int32Array} mask_labels Labels for each pixel in the mask.
258
+ * @param {Tensor[]} mask_probs Probabilities for each pixel in the masks.
259
+ * @param {number} k The class id of the segment.
260
+ * @param {number} mask_threshold The mask threshold.
261
+ * @param {number} overlap_mask_area_threshold The overlap mask area threshold.
262
+ * @returns {[boolean, number[]]} Whether the segment is valid or not, and the indices of the valid labels.
263
+ * @private
264
+ */
265
+ function check_segment_validity(
266
+ mask_labels,
267
+ mask_probs,
268
+ k,
269
+ mask_threshold = 0.5,
270
+ overlap_mask_area_threshold = 0.8
271
+ ) {
272
+ // mask_k is a 1D array of indices, indicating where the mask is equal to k
273
+ const mask_k = [];
274
+ let mask_k_area = 0;
275
+ let original_area = 0;
276
+
277
+ const mask_probs_k_data = mask_probs[k].data;
278
+
279
+ // Compute the area of all the stuff in query k
280
+ for (let i = 0; i < mask_labels.length; ++i) {
281
+ if (mask_labels[i] === k) {
282
+ mask_k.push(i);
283
+ ++mask_k_area;
284
+ }
285
+
286
+ if (mask_probs_k_data[i] >= mask_threshold) {
287
+ ++original_area;
288
+ }
289
+ }
290
+ let mask_exists = mask_k_area > 0 && original_area > 0;
291
+
292
+ // Eliminate disconnected tiny segments
293
+ if (mask_exists) {
294
+ // Perform additional check
295
+ let area_ratio = mask_k_area / original_area;
296
+ mask_exists = area_ratio > overlap_mask_area_threshold;
297
+ }
298
+
299
+ return [mask_exists, mask_k]
300
+ }
301
+
302
+ /**
303
+ * Computes the segments.
304
+ * @param {Tensor[]} mask_probs The mask probabilities.
305
+ * @param {number[]} pred_scores The predicted scores.
306
+ * @param {number[]} pred_labels The predicted labels.
307
+ * @param {number} mask_threshold The mask threshold.
308
+ * @param {number} overlap_mask_area_threshold The overlap mask area threshold.
309
+ * @param {Set<number>} label_ids_to_fuse The label ids to fuse.
310
+ * @param {number[]} target_size The target size of the image.
311
+ * @returns {[Tensor, Array<{id: number, label_id: number, score: number}>]} The computed segments.
312
+ * @private
313
+ */
314
+ function compute_segments(
315
+ mask_probs,
316
+ pred_scores,
317
+ pred_labels,
318
+ mask_threshold,
319
+ overlap_mask_area_threshold,
320
+ label_ids_to_fuse = null,
321
+ target_size = null,
322
+ ) {
323
+ const [height, width] = target_size ?? mask_probs[0].dims;
324
+
325
+ const segmentation = new Tensor(
326
+ 'int32',
327
+ new Int32Array(height * width),
328
+ [height, width]
329
+ );
330
+ const segments = [];
331
+
332
+ // 1. If target_size is not null, we need to resize the masks to the target size
333
+ if (target_size !== null) {
334
+ // resize the masks to the target size
335
+ for (let i = 0; i < mask_probs.length; ++i) {
336
+ mask_probs[i] = interpolate(mask_probs[i], target_size, 'bilinear', false);
337
+ }
338
+ }
339
+
340
+ // 2. Weigh each mask by its prediction score
341
+ // NOTE: `mask_probs` is updated in-place
342
+ //
343
+ // Temporary storage for the best label/scores for each pixel ([height, width]):
344
+ const mask_labels = new Int32Array(mask_probs[0].data.length);
345
+ const bestScores = new Float32Array(mask_probs[0].data.length);
346
+
347
+ for (let i = 0; i < mask_probs.length; ++i) {
348
+ let score = pred_scores[i];
349
+
350
+ const mask_probs_i_data = mask_probs[i].data;
351
+
352
+ for (let j = 0; j < mask_probs_i_data.length; ++j) {
353
+ mask_probs_i_data[j] *= score
354
+ if (mask_probs_i_data[j] > bestScores[j]) {
355
+ mask_labels[j] = i;
356
+ bestScores[j] = mask_probs_i_data[j];
357
+ }
358
+ }
359
+ }
360
+
361
+ let current_segment_id = 0;
362
+
363
+ // let stuff_memory_list = {}
364
+ const segmentation_data = segmentation.data;
365
+ for (let k = 0; k < pred_labels.length; ++k) {
366
+ const pred_class = pred_labels[k];
367
+
368
+ // TODO add `should_fuse`
369
+ // let should_fuse = pred_class in label_ids_to_fuse
370
+
371
+ // Check if mask exists and large enough to be a segment
372
+ const [mask_exists, mask_k] = check_segment_validity(
373
+ mask_labels,
374
+ mask_probs,
375
+ k,
376
+ mask_threshold,
377
+ overlap_mask_area_threshold
378
+ )
379
+
380
+ if (!mask_exists) {
381
+ // Nothing to see here
382
+ continue;
383
+ }
384
+
385
+ // TODO
386
+ // if (pred_class in stuff_memory_list) {
387
+ // current_segment_id = stuff_memory_list[pred_class]
388
+ // } else {
389
+ // current_segment_id += 1;
390
+ // }
391
+ ++current_segment_id;
392
+
393
+
394
+ // Add current object segment to final segmentation map
395
+ for (const index of mask_k) {
396
+ segmentation_data[index] = current_segment_id;
397
+ }
398
+
399
+ segments.push({
400
+ id: current_segment_id,
401
+ label_id: pred_class,
402
+ // was_fused: should_fuse, TODO
403
+ score: pred_scores[k],
404
+ })
405
+
406
+ // TODO
407
+ // if(should_fuse){
408
+ // stuff_memory_list[pred_class] = current_segment_id
409
+ // }
410
+ }
411
+
412
+ return [segmentation, segments];
413
+ }
414
+
415
+
416
+ /**
417
+ * Post-process the model output to generate the final panoptic segmentation.
418
+ * @param {*} outputs The model output to post process
419
+ * @param {number} [threshold=0.5] The probability score threshold to keep predicted instance masks.
420
+ * @param {number} [mask_threshold=0.5] Threshold to use when turning the predicted masks into binary values.
421
+ * @param {number} [overlap_mask_area_threshold=0.8] The overlap mask area threshold to merge or discard small disconnected parts within each binary instance mask.
422
+ * @param {Set<number>} [label_ids_to_fuse=null] The labels in this state will have all their instances be fused together.
423
+ * @param {[number, number][]} [target_sizes=null] The target sizes to resize the masks to.
424
+ * @returns {Array<{ segmentation: Tensor, segments_info: Array<{id: number, label_id: number, score: number}>}>}
425
+ */
426
+ function post_process_panoptic_segmentation(
427
+ outputs,
428
+ threshold = 0.5,
429
+ mask_threshold = 0.5,
430
+ overlap_mask_area_threshold = 0.8,
431
+ label_ids_to_fuse = null,
432
+ target_sizes = null,
433
+ ) {
434
+ if (label_ids_to_fuse === null) {
435
+ console.warn("`label_ids_to_fuse` unset. No instance will be fused.")
436
+ label_ids_to_fuse = new Set();
437
+ }
438
+
439
+ const class_queries_logits = outputs.class_queries_logits ?? outputs.logits; // [batch_size, num_queries, num_classes+1]
440
+ const masks_queries_logits = outputs.masks_queries_logits ?? outputs.pred_masks; // [batch_size, num_queries, height, width]
441
+
442
+ const mask_probs = masks_queries_logits.sigmoid() // [batch_size, num_queries, height, width]
443
+
444
+ let [batch_size, num_queries, num_labels] = class_queries_logits.dims;
445
+ num_labels -= 1; // Remove last class (background)
446
+
447
+ if (target_sizes !== null && target_sizes.length !== batch_size) {
448
+ throw Error("Make sure that you pass in as many target sizes as the batch dimension of the logits")
449
+ }
450
+
451
+ let toReturn = [];
452
+ for (let i = 0; i < batch_size; ++i) {
453
+ let target_size = target_sizes !== null ? target_sizes[i] : null;
454
+
455
+ let class_logits = class_queries_logits[i];
456
+ let mask_logits = mask_probs[i];
457
+
458
+ let [mask_probs_item, pred_scores_item, pred_labels_item] = remove_low_and_no_objects(class_logits, mask_logits, threshold, num_labels);
459
+
460
+ if (pred_labels_item.length === 0) {
461
+ // No mask found
462
+ let [height, width] = target_size ?? mask_logits.dims.slice(-2);
463
+
464
+ let segmentation = new Tensor(
465
+ 'int32',
466
+ new Int32Array(height * width).fill(-1),
467
+ [height, width]
468
+ )
469
+ toReturn.push({
470
+ segmentation: segmentation,
471
+ segments_info: []
472
+ });
473
+ continue;
474
+ }
475
+
476
+
477
+ // Get segmentation map and segment information of batch item
478
+ let [segmentation, segments] = compute_segments(
479
+ mask_probs_item,
480
+ pred_scores_item,
481
+ pred_labels_item,
482
+ mask_threshold,
483
+ overlap_mask_area_threshold,
484
+ label_ids_to_fuse,
485
+ target_size,
486
+ )
487
+
488
+ toReturn.push({
489
+ segmentation: segmentation,
490
+ segments_info: segments
491
+ })
492
+ }
493
+
494
+ return toReturn;
495
+ }
496
+
497
+
498
+ /**
499
+ * Post-processes the outputs of the model (for instance segmentation).
500
+ * @param {*} outputs Raw outputs of the model.
501
+ * @param {number} [threshold=0.5] The probability score threshold to keep predicted instance masks.
502
+ * @param {[number, number][]} [target_sizes=null] List of tuples corresponding to the requested final size
503
+ * (height, width) of each prediction. If unset, predictions will not be resized.
504
+ * @returns {Array<{ segmentation: Tensor, segments_info: Array<{id: number, label_id: number, score: number}>}>}
505
+ */
506
+ function post_process_instance_segmentation(outputs, threshold = 0.5, target_sizes = null) {
507
+ throw new Error('Not implemented yet');
508
+ return [];
509
+ }
510
+
217
511
  /**
218
512
  * Named tuple to indicate the order we are using is (height x width), even though
219
513
  * the Graphics’ industry standard is (width x height).
@@ -802,6 +1096,7 @@ export class SegformerFeatureExtractor extends ImageFeatureExtractor {
802
1096
  return post_process_semantic_segmentation(...args);
803
1097
  }
804
1098
  }
1099
+ export class PvtImageProcessor extends ImageFeatureExtractor { }
805
1100
  export class DPTFeatureExtractor extends ImageFeatureExtractor { }
806
1101
  export class DPTImageProcessor extends DPTFeatureExtractor { } // NOTE: extends DPTFeatureExtractor
807
1102
  export class BitImageProcessor extends ImageFeatureExtractor { }
@@ -941,302 +1236,32 @@ export class DetrFeatureExtractor extends ImageFeatureExtractor {
941
1236
  // TODO support different mask sizes (not just 64x64)
942
1237
  // Currently, just fill pixel mask with 1s
943
1238
  const maskSize = [result.pixel_values.dims[0], 64, 64];
944
- const pixel_mask = new Tensor(
945
- 'int64',
946
- new BigInt64Array(maskSize.reduce((a, b) => a * b)).fill(1n),
947
- maskSize
948
- );
1239
+ const pixel_mask = full(maskSize, 1n);
949
1240
 
950
1241
  return { ...result, pixel_mask };
951
1242
  }
952
1243
 
953
- /**
954
- * Post-processes the outputs of the model (for object detection).
955
- * @param {Object} outputs The outputs of the model that must be post-processed
956
- * @param {Tensor} outputs.logits The logits
957
- * @param {Tensor} outputs.pred_boxes The predicted boxes.
958
- * @return {Object[]} An array of objects containing the post-processed outputs.
959
- */
960
-
961
1244
  /** @type {typeof post_process_object_detection} */
962
1245
  post_process_object_detection(...args) {
963
1246
  return post_process_object_detection(...args);
964
1247
  }
965
1248
 
966
- /**
967
- * Binarize the given masks using `object_mask_threshold`, it returns the associated values of `masks`, `scores` and `labels`.
968
- * @param {Tensor} class_logits The class logits.
969
- * @param {Tensor} mask_logits The mask logits.
970
- * @param {number} object_mask_threshold A number between 0 and 1 used to binarize the masks.
971
- * @param {number} num_labels The number of labels.
972
- * @returns {[Tensor[], number[], number[]]} The binarized masks, the scores, and the labels.
973
- */
974
- remove_low_and_no_objects(class_logits, mask_logits, object_mask_threshold, num_labels) {
975
-
976
- let mask_probs_item = [];
977
- let pred_scores_item = [];
978
- let pred_labels_item = [];
979
-
980
- for (let j = 0; j < class_logits.dims[0]; ++j) {
981
- let cls = class_logits[j];
982
- let mask = mask_logits[j];
983
-
984
- let pred_label = max(cls.data)[1];
985
- if (pred_label === num_labels) {
986
- // Is the background, so we ignore it
987
- continue;
988
- }
989
-
990
- let scores = softmax(cls.data);
991
- let pred_score = scores[pred_label];
992
- if (pred_score > object_mask_threshold) {
993
- mask_probs_item.push(mask);
994
- pred_scores_item.push(pred_score);
995
- pred_labels_item.push(pred_label);
996
- }
997
- }
998
-
999
- return [mask_probs_item, pred_scores_item, pred_labels_item];
1000
-
1001
- }
1002
-
1003
- /**
1004
- * Checks whether the segment is valid or not.
1005
- * @param {Int32Array} mask_labels Labels for each pixel in the mask.
1006
- * @param {Tensor[]} mask_probs Probabilities for each pixel in the masks.
1007
- * @param {number} k The class id of the segment.
1008
- * @param {number} mask_threshold The mask threshold.
1009
- * @param {number} overlap_mask_area_threshold The overlap mask area threshold.
1010
- * @returns {[boolean, number[]]} Whether the segment is valid or not, and the indices of the valid labels.
1011
- */
1012
- check_segment_validity(
1013
- mask_labels,
1014
- mask_probs,
1015
- k,
1016
- mask_threshold = 0.5,
1017
- overlap_mask_area_threshold = 0.8
1018
- ) {
1019
- // mask_k is a 1D array of indices, indicating where the mask is equal to k
1020
- let mask_k = [];
1021
- let mask_k_area = 0;
1022
- let original_area = 0;
1023
-
1024
- const mask_probs_k_data = mask_probs[k].data;
1025
-
1026
- // Compute the area of all the stuff in query k
1027
- for (let i = 0; i < mask_labels.length; ++i) {
1028
- if (mask_labels[i] === k) {
1029
- mask_k.push(i);
1030
- ++mask_k_area;
1031
- }
1032
-
1033
- if (mask_probs_k_data[i] >= mask_threshold) {
1034
- ++original_area;
1035
- }
1036
- }
1037
- let mask_exists = mask_k_area > 0 && original_area > 0;
1038
-
1039
- // Eliminate disconnected tiny segments
1040
- if (mask_exists) {
1041
- // Perform additional check
1042
- let area_ratio = mask_k_area / original_area;
1043
- mask_exists = area_ratio > overlap_mask_area_threshold;
1044
- }
1045
-
1046
- return [mask_exists, mask_k]
1249
+ /** @type {typeof post_process_panoptic_segmentation} */
1250
+ post_process_panoptic_segmentation(...args) {
1251
+ return post_process_panoptic_segmentation(...args);
1047
1252
  }
1048
1253
 
1049
- /**
1050
- * Computes the segments.
1051
- * @param {Tensor[]} mask_probs The mask probabilities.
1052
- * @param {number[]} pred_scores The predicted scores.
1053
- * @param {number[]} pred_labels The predicted labels.
1054
- * @param {number} mask_threshold The mask threshold.
1055
- * @param {number} overlap_mask_area_threshold The overlap mask area threshold.
1056
- * @param {Set<number>} label_ids_to_fuse The label ids to fuse.
1057
- * @param {number[]} target_size The target size of the image.
1058
- * @returns {[Tensor, Array<{id: number, label_id: number, score: number}>]} The computed segments.
1059
- */
1060
- compute_segments(
1061
- mask_probs,
1062
- pred_scores,
1063
- pred_labels,
1064
- mask_threshold,
1065
- overlap_mask_area_threshold,
1066
- label_ids_to_fuse = null,
1067
- target_size = null,
1068
- ) {
1069
- let [height, width] = target_size ?? mask_probs[0].dims;
1070
-
1071
- let segmentation = new Tensor(
1072
- 'int32',
1073
- new Int32Array(height * width),
1074
- [height, width]
1075
- );
1076
- let segments = [];
1077
-
1078
- // 1. If target_size is not null, we need to resize the masks to the target size
1079
- if (target_size !== null) {
1080
- // resize the masks to the target size
1081
- for (let i = 0; i < mask_probs.length; ++i) {
1082
- mask_probs[i] = interpolate(mask_probs[i], target_size, 'bilinear', false);
1083
- }
1084
- }
1085
-
1086
- // 2. Weigh each mask by its prediction score
1087
- // NOTE: `mask_probs` is updated in-place
1088
- //
1089
- // Temporary storage for the best label/scores for each pixel ([height, width]):
1090
- let mask_labels = new Int32Array(mask_probs[0].data.length);
1091
- let bestScores = new Float32Array(mask_probs[0].data.length);
1092
-
1093
- for (let i = 0; i < mask_probs.length; ++i) {
1094
- let score = pred_scores[i];
1095
-
1096
- const mask_probs_i_data = mask_probs[i].data;
1097
-
1098
- for (let j = 0; j < mask_probs_i_data.length; ++j) {
1099
- mask_probs_i_data[j] *= score
1100
- if (mask_probs_i_data[j] > bestScores[j]) {
1101
- mask_labels[j] = i;
1102
- bestScores[j] = mask_probs_i_data[j];
1103
- }
1104
- }
1105
- }
1106
-
1107
- let current_segment_id = 0;
1108
-
1109
- // let stuff_memory_list = {}
1110
- const segmentation_data = segmentation.data;
1111
- for (let k = 0; k < pred_labels.length; ++k) {
1112
- let pred_class = pred_labels[k];
1113
-
1114
- // TODO add `should_fuse`
1115
- // let should_fuse = pred_class in label_ids_to_fuse
1116
-
1117
- // Check if mask exists and large enough to be a segment
1118
- let [mask_exists, mask_k] = this.check_segment_validity(
1119
- mask_labels,
1120
- mask_probs,
1121
- k,
1122
- mask_threshold,
1123
- overlap_mask_area_threshold
1124
- )
1125
-
1126
- if (!mask_exists) {
1127
- // Nothing to see here
1128
- continue;
1129
- }
1130
-
1131
- // TODO
1132
- // if (pred_class in stuff_memory_list) {
1133
- // current_segment_id = stuff_memory_list[pred_class]
1134
- // } else {
1135
- // current_segment_id += 1;
1136
- // }
1137
- ++current_segment_id;
1138
-
1139
-
1140
- // Add current object segment to final segmentation map
1141
- for (let index of mask_k) {
1142
- segmentation_data[index] = current_segment_id;
1143
- }
1144
-
1145
- segments.push({
1146
- id: current_segment_id,
1147
- label_id: pred_class,
1148
- // was_fused: should_fuse, TODO
1149
- score: pred_scores[k],
1150
- })
1151
-
1152
- // TODO
1153
- // if(should_fuse){
1154
- // stuff_memory_list[pred_class] = current_segment_id
1155
- // }
1156
- }
1157
-
1158
- return [segmentation, segments];
1254
+ post_process_instance_segmentation() {
1255
+ // TODO
1256
+ throw Error("Not implemented yet");
1159
1257
  }
1258
+ }
1160
1259
 
1161
- /**
1162
- * Post-process the model output to generate the final panoptic segmentation.
1163
- * @param {*} outputs The model output to post process
1164
- * @param {number} [threshold=0.5] The probability score threshold to keep predicted instance masks.
1165
- * @param {number} [mask_threshold=0.5] Threshold to use when turning the predicted masks into binary values.
1166
- * @param {number} [overlap_mask_area_threshold=0.8] The overlap mask area threshold to merge or discard small disconnected parts within each binary instance mask.
1167
- * @param {Set<number>} [label_ids_to_fuse=null] The labels in this state will have all their instances be fused together.
1168
- * @param {number[][]} [target_sizes=null] The target sizes to resize the masks to.
1169
- * @returns {Array<{ segmentation: Tensor, segments_info: Array<{id: number, label_id: number, score: number}>}>}
1170
- */
1171
- post_process_panoptic_segmentation(
1172
- outputs,
1173
- threshold = 0.5,
1174
- mask_threshold = 0.5,
1175
- overlap_mask_area_threshold = 0.8,
1176
- label_ids_to_fuse = null,
1177
- target_sizes = null,
1178
- ) {
1179
- if (label_ids_to_fuse === null) {
1180
- console.warn("`label_ids_to_fuse` unset. No instance will be fused.")
1181
- label_ids_to_fuse = new Set();
1182
- }
1183
-
1184
- const class_queries_logits = outputs.logits; // [batch_size, num_queries, num_classes+1]
1185
- const masks_queries_logits = outputs.pred_masks; // [batch_size, num_queries, height, width]
1186
-
1187
- const mask_probs = masks_queries_logits.sigmoid() // [batch_size, num_queries, height, width]
1188
-
1189
- let [batch_size, num_queries, num_labels] = class_queries_logits.dims;
1190
- num_labels -= 1; // Remove last class (background)
1191
-
1192
- if (target_sizes !== null && target_sizes.length !== batch_size) {
1193
- throw Error("Make sure that you pass in as many target sizes as the batch dimension of the logits")
1194
- }
1195
-
1196
- let toReturn = [];
1197
- for (let i = 0; i < batch_size; ++i) {
1198
- let target_size = target_sizes !== null ? target_sizes[i] : null;
1199
-
1200
- let class_logits = class_queries_logits[i];
1201
- let mask_logits = mask_probs[i];
1202
-
1203
- let [mask_probs_item, pred_scores_item, pred_labels_item] = this.remove_low_and_no_objects(class_logits, mask_logits, threshold, num_labels);
1204
-
1205
- if (pred_labels_item.length === 0) {
1206
- // No mask found
1207
- let [height, width] = target_size ?? mask_logits.dims.slice(-2);
1208
-
1209
- let segmentation = new Tensor(
1210
- 'int32',
1211
- new Int32Array(height * width).fill(-1),
1212
- [height, width]
1213
- )
1214
- toReturn.push({
1215
- segmentation: segmentation,
1216
- segments_info: []
1217
- });
1218
- continue;
1219
- }
1220
-
1221
-
1222
- // Get segmentation map and segment information of batch item
1223
- let [segmentation, segments] = this.compute_segments(
1224
- mask_probs_item,
1225
- pred_scores_item,
1226
- pred_labels_item,
1227
- mask_threshold,
1228
- overlap_mask_area_threshold,
1229
- label_ids_to_fuse,
1230
- target_size,
1231
- )
1232
-
1233
- toReturn.push({
1234
- segmentation: segmentation,
1235
- segments_info: segments
1236
- })
1237
- }
1260
+ export class MaskFormerFeatureExtractor extends ImageFeatureExtractor {
1238
1261
 
1239
- return toReturn;
1262
+ /** @type {typeof post_process_panoptic_segmentation} */
1263
+ post_process_panoptic_segmentation(...args) {
1264
+ return post_process_panoptic_segmentation(...args);
1240
1265
  }
1241
1266
 
1242
1267
  post_process_instance_segmentation() {
@@ -1245,6 +1270,7 @@ export class DetrFeatureExtractor extends ImageFeatureExtractor {
1245
1270
  }
1246
1271
  }
1247
1272
 
1273
+
1248
1274
  export class YolosFeatureExtractor extends ImageFeatureExtractor {
1249
1275
  /** @type {typeof post_process_object_detection} */
1250
1276
  post_process_object_detection(...args) {
@@ -2534,11 +2560,13 @@ export class AutoProcessor {
2534
2560
  BitImageProcessor,
2535
2561
  DPTImageProcessor,
2536
2562
  DPTFeatureExtractor,
2563
+ PvtImageProcessor,
2537
2564
  GLPNFeatureExtractor,
2538
2565
  BeitFeatureExtractor,
2539
2566
  DeiTFeatureExtractor,
2540
2567
  DetrFeatureExtractor,
2541
2568
  RTDetrImageProcessor,
2569
+ MaskFormerFeatureExtractor,
2542
2570
  YolosFeatureExtractor,
2543
2571
  DonutFeatureExtractor,
2544
2572
  NougatImageProcessor,