@huggingface/tasks 0.12.22 → 0.12.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +29 -11
- package/dist/index.js +29 -11
- package/dist/src/model-libraries-snippets.d.ts.map +1 -1
- package/dist/src/tasks/image-segmentation/data.d.ts.map +1 -1
- package/dist/src/tasks/object-detection/data.d.ts.map +1 -1
- package/dist/src/tasks/token-classification/inference.d.ts +8 -5
- package/dist/src/tasks/token-classification/inference.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/model-libraries-snippets.ts +21 -2
- package/src/tasks/image-segmentation/about.md +1 -1
- package/src/tasks/image-segmentation/data.ts +7 -6
- package/src/tasks/object-detection/data.ts +5 -4
- package/src/tasks/token-classification/inference.ts +8 -5
- package/src/tasks/token-classification/spec/output.json +6 -2
package/dist/index.cjs
CHANGED
@@ -2311,21 +2311,21 @@ var taskData12 = {
   models: [
     {
       // TO DO: write description
-      description: "Solid
-      id: "
+      description: "Solid semantic segmentation model trained on ADE20k.",
+      id: "openmmlab/upernet-convnext-small"
     },
     {
       description: "Background removal model.",
       id: "briaai/RMBG-1.4"
     },
-    {
-      description: "Semantic segmentation model trained on ADE20k benchmark dataset with 512x512 resolution.",
-      id: "nvidia/segformer-b0-finetuned-ade-512-512"
-    },
     {
       description: "A multipurpose image segmentation model for high resolution images.",
       id: "ZhengPeng7/BiRefNet"
     },
+    {
+      description: "Semantic segmentation model trained on ADE20k dataset.",
+      id: "nvidia/segformer-b0-finetuned-ade-512-512"
+    },
     {
       description: "Panoptic segmentation model trained COCO (common objects) dataset.",
       id: "facebook/mask2former-swin-large-coco-panoptic"
@@ -2457,15 +2457,15 @@ var taskData14 = {
   ],
   models: [
     {
-      description: "Solid object detection model trained on the
+      description: "Solid object detection model pre-trained on the COCO 2017 dataset.",
       id: "facebook/detr-resnet-50"
     },
     {
-      description: "
-      id: "
+      description: "Real-time and accurate object detection model.",
+      id: "jameslahm/yolov10x"
     },
     {
-      description: "Fast and accurate object detection model trained on COCO
+      description: "Fast and accurate object detection model trained on COCO and Object365 datasets.",
       id: "PekingU/rtdetr_r18vd_coco_o365"
     }
   ],
@@ -5118,12 +5118,30 @@ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
 var sampleFactory = (model) => [
   `python -m sample_factory.huggingface.load_from_hub -r ${model.id} -d ./train_dir`
 ];
+function get_widget_examples_from_st_model(model) {
+  const widgetExample = model.widgetData?.[0];
+  if (widgetExample) {
+    return [widgetExample.source_sentence, ...widgetExample.sentences];
+  }
+}
 var sentenceTransformers = (model) => {
   const remote_code_snippet = model.tags.includes(TAG_CUSTOM_CODE) ? ", trust_remote_code=True" : "";
+  const exampleSentences = get_widget_examples_from_st_model(model) ?? [
+    "The weather is lovely today.",
+    "It's so sunny outside!",
+    "He drove to the stadium."
+  ];
   return [
     `from sentence_transformers import SentenceTransformer
 
-model = SentenceTransformer("${model.id}"${remote_code_snippet})
+model = SentenceTransformer("${model.id}"${remote_code_snippet})
+
+sentences = ${JSON.stringify(exampleSentences, null, 4)}
+embeddings = model.encode(sentences)
+
+similarities = model.similarity(embeddings, embeddings)
+print(similarities.shape)
+# [${exampleSentences.length}, ${exampleSentences.length}]`
   ];
 };
 var setfit = (model) => [
package/dist/index.js
CHANGED
@@ -2273,21 +2273,21 @@ var taskData12 = {
   models: [
     {
       // TO DO: write description
-      description: "Solid
-      id: "
+      description: "Solid semantic segmentation model trained on ADE20k.",
+      id: "openmmlab/upernet-convnext-small"
     },
     {
       description: "Background removal model.",
       id: "briaai/RMBG-1.4"
     },
-    {
-      description: "Semantic segmentation model trained on ADE20k benchmark dataset with 512x512 resolution.",
-      id: "nvidia/segformer-b0-finetuned-ade-512-512"
-    },
     {
       description: "A multipurpose image segmentation model for high resolution images.",
       id: "ZhengPeng7/BiRefNet"
     },
+    {
+      description: "Semantic segmentation model trained on ADE20k dataset.",
+      id: "nvidia/segformer-b0-finetuned-ade-512-512"
+    },
     {
       description: "Panoptic segmentation model trained COCO (common objects) dataset.",
       id: "facebook/mask2former-swin-large-coco-panoptic"
@@ -2419,15 +2419,15 @@ var taskData14 = {
   ],
   models: [
     {
-      description: "Solid object detection model trained on the
+      description: "Solid object detection model pre-trained on the COCO 2017 dataset.",
       id: "facebook/detr-resnet-50"
     },
     {
-      description: "
-      id: "
+      description: "Real-time and accurate object detection model.",
+      id: "jameslahm/yolov10x"
     },
     {
-      description: "Fast and accurate object detection model trained on COCO
+      description: "Fast and accurate object detection model trained on COCO and Object365 datasets.",
       id: "PekingU/rtdetr_r18vd_coco_o365"
     }
   ],
@@ -5080,12 +5080,30 @@ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
 var sampleFactory = (model) => [
   `python -m sample_factory.huggingface.load_from_hub -r ${model.id} -d ./train_dir`
 ];
+function get_widget_examples_from_st_model(model) {
+  const widgetExample = model.widgetData?.[0];
+  if (widgetExample) {
+    return [widgetExample.source_sentence, ...widgetExample.sentences];
+  }
+}
 var sentenceTransformers = (model) => {
   const remote_code_snippet = model.tags.includes(TAG_CUSTOM_CODE) ? ", trust_remote_code=True" : "";
+  const exampleSentences = get_widget_examples_from_st_model(model) ?? [
+    "The weather is lovely today.",
+    "It's so sunny outside!",
+    "He drove to the stadium."
+  ];
   return [
     `from sentence_transformers import SentenceTransformer
 
-model = SentenceTransformer("${model.id}"${remote_code_snippet})
+model = SentenceTransformer("${model.id}"${remote_code_snippet})
+
+sentences = ${JSON.stringify(exampleSentences, null, 4)}
+embeddings = model.encode(sentences)
+
+similarities = model.similarity(embeddings, embeddings)
+print(similarities.shape)
+# [${exampleSentences.length}, ${exampleSentences.length}]`
   ];
 };
 var setfit = (model) => [
package/dist/src/model-libraries-snippets.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAe9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAaF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,iBAAiB,UAAW,SAAS,KAAG,MAAM,EA6C1D,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAwBlD,CAAC;AAuCF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EAwCrD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EAmBrD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAS9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAanC,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EA2B7C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;
+
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAe9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAaF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,iBAAiB,UAAW,SAAS,KAAG,MAAM,EA6C1D,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAwBlD,CAAC;AAuCF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EAwCrD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EAmBrD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAS9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAanC,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EA2B7C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AASF,eAAO,MAAM,oBAAoB,UAAW,SAAS,KAAG,MAAM,EAoB7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EA4CrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAOhD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAYjD,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,
UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC"}
package/dist/src/tasks/image-segmentation/data.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/image-segmentation/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,
+
{"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/image-segmentation/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cA+Ff,CAAC;AAEF,eAAe,QAAQ,CAAC"}
package/dist/src/tasks/object-detection/data.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/object-detection/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,
+
{"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/object-detection/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cAkFf,CAAC;AAEF,eAAe,QAAQ,CAAC"}
package/dist/src/tasks/token-classification/inference.d.ts
CHANGED
@@ -60,12 +60,15 @@ export interface TokenClassificationOutputElement {
     /**
      * The character position in the input where this group ends.
      */
-    end
+    end: number;
     /**
-     * The predicted label for
+     * The predicted label for a single token
+     */
+    entity?: string;
+    /**
+     * The predicted label for a group of one or more tokens
      */
     entity_group?: string;
-    label: unknown;
     /**
      * The associated score / probability
      */
@@ -73,11 +76,11 @@ export interface TokenClassificationOutputElement {
     /**
      * The character position in the input where this group begins.
      */
-    start
+    start: number;
     /**
      * The corresponding text
      */
-    word
+    word: string;
     [property: string]: unknown;
 }
 //# sourceMappingURL=inference.d.ts.map
package/dist/src/tasks/token-classification/inference.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/token-classification/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACxC;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,6BAA6B,CAAC;IAC3C,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;;;GAIG;AACH,MAAM,WAAW,6BAA6B;IAC7C;;OAEG;IACH,oBAAoB,CAAC,EAAE,sCAAsC,CAAC;IAC9D;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,EAAE,CAAC;IACzB;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;;;;;;;;;;;;GAaG;AACH,MAAM,MAAM,sCAAsC,GAAG,MAAM,GAAG,QAAQ,GAAG,OAAO,GAAG,SAAS,GAAG,KAAK,CAAC;AACrG,MAAM,MAAM,yBAAyB,GAAG,gCAAgC,EAAE,CAAC;AAC3E;;GAEG;AACH,MAAM,WAAW,gCAAgC;IAChD;;OAEG;IACH,GAAG,
+
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/token-classification/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACxC;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,6BAA6B,CAAC;IAC3C,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;;;GAIG;AACH,MAAM,WAAW,6BAA6B;IAC7C;;OAEG;IACH,oBAAoB,CAAC,EAAE,sCAAsC,CAAC;IAC9D;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,EAAE,CAAC;IACzB;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;;;;;;;;;;;;GAaG;AACH,MAAM,MAAM,sCAAsC,GAAG,MAAM,GAAG,QAAQ,GAAG,OAAO,GAAG,SAAS,GAAG,KAAK,CAAC;AACrG,MAAM,MAAM,yBAAyB,GAAG,gCAAgC,EAAE,CAAC;AAC3E;;GAEG;AACH,MAAM,WAAW,gCAAgC;IAChD;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IACZ;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B"}
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@huggingface/tasks",
   "packageManager": "pnpm@8.10.5",
-  "version": "0.12.
+  "version": "0.12.23",
   "description": "List of ML tasks for huggingface.co/tasks",
   "repository": "https://github.com/huggingface/huggingface.js.git",
   "publishConfig": {
package/src/model-libraries-snippets.ts
CHANGED
@@ -1,5 +1,5 @@
 import type { ModelData } from "./model-data";
-import type { WidgetExampleTextInput } from "./widget-example";
+import type { WidgetExampleTextInput, WidgetExampleSentenceSimilarityInput } from "./widget-example";
 import { LIBRARY_TASK_MAPPING } from "./library-to-tasks";
 
 const TAG_CUSTOM_CODE = "custom_code";
@@ -704,13 +704,32 @@ export const sampleFactory = (model: ModelData): string[] => [
   `python -m sample_factory.huggingface.load_from_hub -r ${model.id} -d ./train_dir`,
 ];
 
+function get_widget_examples_from_st_model(model: ModelData): string[] | undefined {
+  const widgetExample = model.widgetData?.[0] as WidgetExampleSentenceSimilarityInput | undefined;
+  if (widgetExample) {
+    return [widgetExample.source_sentence, ...widgetExample.sentences];
+  }
+}
+
 export const sentenceTransformers = (model: ModelData): string[] => {
   const remote_code_snippet = model.tags.includes(TAG_CUSTOM_CODE) ? ", trust_remote_code=True" : "";
+  const exampleSentences = get_widget_examples_from_st_model(model) ?? [
+    "The weather is lovely today.",
+    "It's so sunny outside!",
+    "He drove to the stadium.",
+  ];
 
   return [
     `from sentence_transformers import SentenceTransformer
 
-model = SentenceTransformer("${model.id}"${remote_code_snippet})
+model = SentenceTransformer("${model.id}"${remote_code_snippet})
+
+sentences = ${JSON.stringify(exampleSentences, null, 4)}
+embeddings = model.encode(sentences)
+
+similarities = model.similarity(embeddings, embeddings)
+print(similarities.shape)
+# [${exampleSentences.length}, ${exampleSentences.length}]`,
   ];
 };
 
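For readers who want to see the effect of the snippet change above without reading the diff line by line, here is a small, self-contained TypeScript sketch of the new behaviour. The trimmed `MinimalModelData` interface and the helper names below are illustrative stand-ins, assuming only the fields visible in this diff (`widgetData[0].source_sentence`, `sentences`, `tags`, `id`); the real `ModelData` type in `@huggingface/tasks` is richer than this stub.

```ts
// Sketch of the new sentence-transformers snippet logic (not the package's exports).
interface SentenceSimilarityWidgetExample {
  source_sentence: string;
  sentences: string[];
}

interface MinimalModelData {
  id: string;
  tags: string[];
  widgetData?: SentenceSimilarityWidgetExample[];
}

// Mirror of get_widget_examples_from_st_model: pick sentences from the model card's widget data.
function widgetExampleSentences(model: MinimalModelData): string[] | undefined {
  const widgetExample = model.widgetData?.[0];
  if (widgetExample) {
    return [widgetExample.source_sentence, ...widgetExample.sentences];
  }
}

function sentenceTransformersSnippet(model: MinimalModelData): string {
  const remoteCode = model.tags.includes("custom_code") ? ", trust_remote_code=True" : "";
  // Fall back to the three stock sentences when the model card defines no widget example.
  const sentences = widgetExampleSentences(model) ?? [
    "The weather is lovely today.",
    "It's so sunny outside!",
    "He drove to the stadium.",
  ];
  return `from sentence_transformers import SentenceTransformer

model = SentenceTransformer("${model.id}"${remoteCode})

sentences = ${JSON.stringify(sentences, null, 4)}
embeddings = model.encode(sentences)

similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# [${sentences.length}, ${sentences.length}]`;
}

// Example: a model whose card carries a sentence-similarity widget example.
console.log(
  sentenceTransformersSnippet({
    id: "sentence-transformers/all-MiniLM-L6-v2",
    tags: [],
    widgetData: [
      {
        source_sentence: "That is a happy person",
        sentences: ["That is a very happy person", "Today is a sunny day"],
      },
    ],
  })
);
```

A model without `widgetData` falls through to the three stock sentences, which is exactly the default list this release adds.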
package/src/tasks/image-segmentation/about.md
CHANGED
@@ -48,7 +48,7 @@ import { HfInference } from "@huggingface/inference";
 const inference = new HfInference(HF_TOKEN);
 await inference.imageSegmentation({
   data: await (await fetch("https://picsum.photos/300/300")).blob(),
-  model: "facebook/
+  model: "facebook/mask2former-swin-base-coco-panoptic",
 });
 ```
 
package/src/tasks/image-segmentation/data.ts
CHANGED
@@ -44,21 +44,22 @@ const taskData: TaskDataCustom = {
   models: [
     {
       // TO DO: write description
-      description:
-
+      description:
+        "Solid semantic segmentation model trained on ADE20k.",
+      id: "openmmlab/upernet-convnext-small",
     },
     {
       description: "Background removal model.",
       id: "briaai/RMBG-1.4",
     },
-    {
-      description: "Semantic segmentation model trained on ADE20k benchmark dataset with 512x512 resolution.",
-      id: "nvidia/segformer-b0-finetuned-ade-512-512",
-    },
     {
       description: "A multipurpose image segmentation model for high resolution images.",
       id: "ZhengPeng7/BiRefNet",
     },
+    {
+      description: "Semantic segmentation model trained on ADE20k dataset.",
+      id: "nvidia/segformer-b0-finetuned-ade-512-512",
+    },
     {
       description: "Panoptic segmentation model trained COCO (common objects) dataset.",
       id: "facebook/mask2former-swin-large-coco-panoptic",
package/src/tasks/object-detection/data.ts
CHANGED
@@ -43,15 +43,16 @@ const taskData: TaskDataCustom = {
   ],
   models: [
     {
-      description: "Solid object detection model trained on the
+      description: "Solid object detection model pre-trained on the COCO 2017 dataset.",
       id: "facebook/detr-resnet-50",
     },
     {
-      description: "
-      id: "
+      description: "Real-time and accurate object detection model.",
+      id: "jameslahm/yolov10x",
     },
     {
-      description:
+      description:
+        "Fast and accurate object detection model trained on COCO and Object365 datasets.",
       id: "PekingU/rtdetr_r18vd_coco_o365",
     },
   ],
package/src/tasks/token-classification/inference.ts
CHANGED
@@ -60,12 +60,15 @@ export interface TokenClassificationOutputElement {
   /**
    * The character position in the input where this group ends.
    */
-  end
+  end: number;
   /**
-   * The predicted label for
+   * The predicted label for a single token
+   */
+  entity?: string;
+  /**
+   * The predicted label for a group of one or more tokens
    */
   entity_group?: string;
-  label: unknown;
   /**
    * The associated score / probability
    */
@@ -73,10 +76,10 @@ export interface TokenClassificationOutputElement {
   /**
    * The character position in the input where this group begins.
    */
-  start
+  start: number;
   /**
    * The corresponding text
    */
-  word
+  word: string;
   [property: string]: unknown;
 }
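The shape change above is easier to read as plain TypeScript. The interface below is a local stand-in for the published `TokenClassificationOutputElement`, keeping only the fields visible in this diff; the sample values are illustrative, not taken from a real inference run.

```ts
// Local stand-in mirroring the updated output element (not an import from @huggingface/tasks).
interface TokenClassificationOutputElement {
  /** The associated score / probability */
  score: number;
  /** The corresponding text */
  word: string;
  /** The character position in the input where this group begins. */
  start: number;
  /** The character position in the input where this group ends. */
  end: number;
  /** Set when the tokens were aggregated into entity groups. */
  entity_group?: string;
  /** Set when no aggregation was applied and each token is labelled individually. */
  entity?: string;
  [property: string]: unknown;
}

// With aggregation: one element per entity group, labelled via `entity_group`.
const grouped: TokenClassificationOutputElement = {
  entity_group: "PER",
  score: 0.998,
  word: "Sarah",
  start: 11,
  end: 16,
};

// Without aggregation: one element per token, labelled via the new optional `entity`.
const perToken: TokenClassificationOutputElement = {
  entity: "B-PER",
  score: 0.997,
  word: "Sarah",
  start: 11,
  end: 16,
};

console.log(grouped.entity_group, perToken.entity);
```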
package/src/tasks/token-classification/spec/output.json
CHANGED
@@ -9,7 +9,11 @@
     "properties": {
       "entity_group": {
         "type": "string",
-        "description": "The predicted label for
+        "description": "The predicted label for a group of one or more tokens"
+      },
+      "entity": {
+        "type": "string",
+        "description": "The predicted label for a single token"
       },
       "score": {
         "type": "number",
@@ -28,6 +32,6 @@
         "description": "The character position in the input where this group ends."
       }
     },
-    "required": ["
+    "required": ["score", "word", "start", "end"]
   }
 }
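To see what the updated `required` list means in practice, here is a hedged validation sketch. Ajv is used only as an example JSON Schema validator (it is not a dependency of `@huggingface/tasks`), and the inline schema is a trimmed stand-in for `spec/output.json`, keeping just the fields shown in the diff.

```ts
import Ajv from "ajv";

// Trimmed stand-in for one element of the token-classification output spec.
const outputElementSchema = {
  type: "object",
  properties: {
    entity_group: { type: "string", description: "The predicted label for a group of one or more tokens" },
    entity: { type: "string", description: "The predicted label for a single token" },
    score: { type: "number" },
    word: { type: "string" },
    start: { type: "integer" },
    end: { type: "integer" },
  },
  required: ["score", "word", "start", "end"],
};

const ajv = new Ajv();
const validate = ajv.compile(outputElementSchema);

// Either labelling style passes, since neither `entity` nor `entity_group` is required.
console.log(validate({ entity: "B-PER", score: 0.99, word: "Sarah", start: 11, end: 16 })); // true
console.log(validate({ entity_group: "PER", score: 0.99, word: "Sarah", start: 11, end: 16 })); // true
console.log(validate({ entity_group: "PER", score: 0.99 })); // false: word, start and end are missing
```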