@huggingface/tasks 0.11.12 → 0.11.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +64 -0
- package/dist/index.js +64 -0
- package/dist/src/model-libraries-snippets.d.ts +1 -0
- package/dist/src/model-libraries-snippets.d.ts.map +1 -1
- package/dist/src/model-libraries.d.ts +15 -2
- package/dist/src/model-libraries.d.ts.map +1 -1
- package/dist/src/pipelines.d.ts +12 -2
- package/dist/src/pipelines.d.ts.map +1 -1
- package/dist/src/tasks/audio-classification/inference.d.ts +3 -2
- package/dist/src/tasks/audio-classification/inference.d.ts.map +1 -1
- package/dist/src/tasks/automatic-speech-recognition/inference.d.ts +3 -2
- package/dist/src/tasks/automatic-speech-recognition/inference.d.ts.map +1 -1
- package/dist/src/tasks/image-classification/inference.d.ts +3 -2
- package/dist/src/tasks/image-classification/inference.d.ts.map +1 -1
- package/dist/src/tasks/image-segmentation/inference.d.ts +10 -6
- package/dist/src/tasks/image-segmentation/inference.d.ts.map +1 -1
- package/dist/src/tasks/image-to-image/inference.d.ts +6 -5
- package/dist/src/tasks/image-to-image/inference.d.ts.map +1 -1
- package/dist/src/tasks/index.d.ts +1 -1
- package/dist/src/tasks/index.d.ts.map +1 -1
- package/dist/src/tasks/keypoint-detection/data.d.ts +4 -0
- package/dist/src/tasks/keypoint-detection/data.d.ts.map +1 -0
- package/dist/src/tasks/object-detection/inference.d.ts +17 -4
- package/dist/src/tasks/object-detection/inference.d.ts.map +1 -1
- package/dist/src/tasks/summarization/inference.d.ts +13 -12
- package/dist/src/tasks/summarization/inference.d.ts.map +1 -1
- package/dist/src/tasks/text-to-image/inference.d.ts +2 -2
- package/dist/src/tasks/translation/inference.d.ts +21 -10
- package/dist/src/tasks/translation/inference.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/model-libraries-snippets.ts +42 -0
- package/src/model-libraries.ts +13 -0
- package/src/pipelines.ts +12 -0
- package/src/tasks/audio-classification/inference.ts +3 -2
- package/src/tasks/audio-classification/spec/input.json +2 -1
- package/src/tasks/audio-classification/spec/output.json +1 -0
- package/src/tasks/automatic-speech-recognition/inference.ts +3 -2
- package/src/tasks/automatic-speech-recognition/spec/input.json +2 -1
- package/src/tasks/common-definitions.json +3 -20
- package/src/tasks/image-classification/inference.ts +3 -2
- package/src/tasks/image-classification/spec/input.json +2 -1
- package/src/tasks/image-classification/spec/output.json +1 -0
- package/src/tasks/image-segmentation/inference.ts +10 -6
- package/src/tasks/image-segmentation/spec/input.json +3 -12
- package/src/tasks/image-segmentation/spec/output.json +4 -3
- package/src/tasks/image-to-image/inference.ts +6 -5
- package/src/tasks/image-to-image/spec/input.json +3 -2
- package/src/tasks/image-to-image/spec/output.json +1 -1
- package/src/tasks/index.ts +3 -6
- package/src/tasks/keypoint-detection/about.md +59 -0
- package/src/tasks/keypoint-detection/data.ts +46 -0
- package/src/tasks/object-detection/inference.ts +17 -4
- package/src/tasks/object-detection/spec/input.json +2 -1
- package/src/tasks/object-detection/spec/output.json +10 -6
- package/src/tasks/summarization/inference.ts +13 -12
- package/src/tasks/summarization/spec/input.json +37 -2
- package/src/tasks/text-classification/spec/output.json +1 -0
- package/src/tasks/text-to-image/inference.ts +2 -2
- package/src/tasks/text-to-image/spec/input.json +1 -1
- package/src/tasks/text-to-image/spec/output.json +1 -1
- package/src/tasks/translation/inference.ts +21 -10
- package/src/tasks/translation/spec/input.json +45 -2
- package/src/tasks/zero-shot-classification/spec/output.json +1 -0
package/dist/index.cjs
CHANGED
@@ -1392,6 +1392,18 @@ var PIPELINE_DATA = {
     name: "Video-Text-to-Text",
     modality: "multimodal",
     color: "blue",
+    hideInDatasets: false
+  },
+  "keypoint-detection": {
+    name: "Keypoint Detection",
+    subtasks: [
+      {
+        type: "pose-estimation",
+        name: "Pose Estimation"
+      }
+    ],
+    modality: "cv",
+    color: "red",
     hideInDatasets: true
   },
   other: {
@@ -4209,6 +4221,7 @@ var TASKS_MODEL_LIBRARIES = {
   "image-to-image": ["diffusers", "transformers", "transformers.js"],
   "image-to-text": ["transformers", "transformers.js"],
   "image-to-video": ["diffusers"],
+  "keypoint-detection": ["transformers"],
   "video-classification": ["transformers"],
   "mask-generation": ["transformers"],
   "multiple-choice": ["transformers"],
@@ -4277,6 +4290,7 @@ var TASKS_DATA = {
   "image-text-to-text": getData("image-text-to-text", data_default11),
   "image-to-text": getData("image-to-text", data_default10),
   "image-to-video": void 0,
+  "keypoint-detection": getData("keypoint-detection", data_default16),
   "mask-generation": getData("mask-generation", data_default13),
   "multiple-choice": void 0,
   "object-detection": getData("object-detection", data_default14),
@@ -4461,6 +4475,43 @@ var diffusers = (model) => {
     return diffusers_default(model);
   }
 };
+var diffusionkit = (model) => {
+  const sd3Snippet = `# Pipeline for Stable Diffusion 3
+from diffusionkit.mlx import DiffusionPipeline
+
+pipeline = DiffusionPipeline(
+  shift=3.0,
+  use_t5=False,
+  model_version=${model.id},
+  low_memory_mode=True,
+  a16=True,
+  w16=True,
+)`;
+  const fluxSnippet = `# Pipeline for Flux
+from diffusionkit.mlx import FluxPipeline
+
+pipeline = FluxPipeline(
+  shift=1.0,
+  model_version=${model.id},
+  low_memory_mode=True,
+  a16=True,
+  w16=True,
+)`;
+  const generateSnippet = `# Image Generation
+HEIGHT = 512
+WIDTH = 512
+NUM_STEPS = ${model.tags.includes("flux") ? 4 : 50}
+CFG_WEIGHT = ${model.tags.includes("flux") ? 0 : 5}
+
+image, _ = pipeline.generate_image(
+  "a photo of a cat",
+  cfg_weight=CFG_WEIGHT,
+  num_steps=NUM_STEPS,
+  latent_size=(HEIGHT // 8, WIDTH // 8),
+)`;
+  const pipelineSnippet = model.tags.includes("flux") ? fluxSnippet : sd3Snippet;
+  return [pipelineSnippet, generateSnippet];
+};
 var cartesia_pytorch = (model) => [
   `# pip install --no-binary :all: cartesia-pytorch
 from cartesia_pytorch import ReneLMHeadModel
@@ -5279,6 +5330,12 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
     filter: true
     /// diffusers has its own more complex "countDownloads" query
   },
+  diffusionkit: {
+    prettyLabel: "DiffusionKit",
+    repoName: "DiffusionKit",
+    repoUrl: "https://github.com/argmaxinc/DiffusionKit",
+    snippets: diffusionkit
+  },
   doctr: {
     prettyLabel: "docTR",
     repoName: "doctr",
@@ -5538,6 +5595,13 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
     snippets: pyannote_audio,
     filter: true
   },
+  "py-feat": {
+    prettyLabel: "Py-Feat",
+    repoName: "Py-Feat",
+    repoUrl: "https://github.com/cosanlab/py-feat",
+    docsUrl: "https://py-feat.org/",
+    filter: false
+  },
   pythae: {
     prettyLabel: "pythae",
     repoName: "pythae",
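The bundle above registers a new "keypoint-detection" pipeline (with a "pose-estimation" subtask) and maps the task to the transformers library. A minimal sketch of how a consumer could read that metadata, assuming PIPELINE_DATA and TASKS_MODEL_LIBRARIES are re-exported from the package root as they are defined in this bundle:

// Sketch only: inspecting the newly registered "keypoint-detection" pipeline.
// Assumes both constants are re-exported from the @huggingface/tasks root.
import { PIPELINE_DATA, TASKS_MODEL_LIBRARIES } from "@huggingface/tasks";

const keypoint = PIPELINE_DATA["keypoint-detection"];
console.log(keypoint.name);             // "Keypoint Detection"
console.log(keypoint.subtasks[0].type); // "pose-estimation"
console.log(keypoint.modality);         // "cv"

// The task is currently wired to a single model library:
console.log(TASKS_MODEL_LIBRARIES["keypoint-detection"]); // ["transformers"]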
package/dist/index.js
CHANGED
@@ -1354,6 +1354,18 @@ var PIPELINE_DATA = {
     name: "Video-Text-to-Text",
     modality: "multimodal",
     color: "blue",
+    hideInDatasets: false
+  },
+  "keypoint-detection": {
+    name: "Keypoint Detection",
+    subtasks: [
+      {
+        type: "pose-estimation",
+        name: "Pose Estimation"
+      }
+    ],
+    modality: "cv",
+    color: "red",
     hideInDatasets: true
   },
   other: {
@@ -4171,6 +4183,7 @@ var TASKS_MODEL_LIBRARIES = {
   "image-to-image": ["diffusers", "transformers", "transformers.js"],
   "image-to-text": ["transformers", "transformers.js"],
   "image-to-video": ["diffusers"],
+  "keypoint-detection": ["transformers"],
   "video-classification": ["transformers"],
   "mask-generation": ["transformers"],
   "multiple-choice": ["transformers"],
@@ -4239,6 +4252,7 @@ var TASKS_DATA = {
   "image-text-to-text": getData("image-text-to-text", data_default11),
   "image-to-text": getData("image-to-text", data_default10),
   "image-to-video": void 0,
+  "keypoint-detection": getData("keypoint-detection", data_default16),
   "mask-generation": getData("mask-generation", data_default13),
   "multiple-choice": void 0,
   "object-detection": getData("object-detection", data_default14),
@@ -4423,6 +4437,43 @@ var diffusers = (model) => {
     return diffusers_default(model);
   }
 };
+var diffusionkit = (model) => {
+  const sd3Snippet = `# Pipeline for Stable Diffusion 3
+from diffusionkit.mlx import DiffusionPipeline
+
+pipeline = DiffusionPipeline(
+  shift=3.0,
+  use_t5=False,
+  model_version=${model.id},
+  low_memory_mode=True,
+  a16=True,
+  w16=True,
+)`;
+  const fluxSnippet = `# Pipeline for Flux
+from diffusionkit.mlx import FluxPipeline
+
+pipeline = FluxPipeline(
+  shift=1.0,
+  model_version=${model.id},
+  low_memory_mode=True,
+  a16=True,
+  w16=True,
+)`;
+  const generateSnippet = `# Image Generation
+HEIGHT = 512
+WIDTH = 512
+NUM_STEPS = ${model.tags.includes("flux") ? 4 : 50}
+CFG_WEIGHT = ${model.tags.includes("flux") ? 0 : 5}
+
+image, _ = pipeline.generate_image(
+  "a photo of a cat",
+  cfg_weight=CFG_WEIGHT,
+  num_steps=NUM_STEPS,
+  latent_size=(HEIGHT // 8, WIDTH // 8),
+)`;
+  const pipelineSnippet = model.tags.includes("flux") ? fluxSnippet : sd3Snippet;
+  return [pipelineSnippet, generateSnippet];
+};
 var cartesia_pytorch = (model) => [
   `# pip install --no-binary :all: cartesia-pytorch
 from cartesia_pytorch import ReneLMHeadModel
@@ -5241,6 +5292,12 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
     filter: true
     /// diffusers has its own more complex "countDownloads" query
   },
+  diffusionkit: {
+    prettyLabel: "DiffusionKit",
+    repoName: "DiffusionKit",
+    repoUrl: "https://github.com/argmaxinc/DiffusionKit",
+    snippets: diffusionkit
+  },
   doctr: {
     prettyLabel: "docTR",
     repoName: "doctr",
@@ -5500,6 +5557,13 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
     snippets: pyannote_audio,
     filter: true
   },
+  "py-feat": {
+    prettyLabel: "Py-Feat",
+    repoName: "Py-Feat",
+    repoUrl: "https://github.com/cosanlab/py-feat",
+    docsUrl: "https://py-feat.org/",
+    filter: false
+  },
   pythae: {
     prettyLabel: "pythae",
     repoName: "pythae",
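Both builds also add a diffusionkit snippet generator and register it in MODEL_LIBRARIES_UI_ELEMENTS; the FLUX variant is chosen whenever the model's tags include "flux", otherwise the Stable Diffusion 3 pipeline snippet is used. A hedged sketch of pulling those snippets, with a made-up model id:

// Sketch only: generating DiffusionKit model-card snippets for a tagged model.
// The model id is hypothetical; ModelData and MODEL_LIBRARIES_UI_ELEMENTS are
// assumed to be exported from the package root, as in the bundles above.
import { MODEL_LIBRARIES_UI_ELEMENTS, type ModelData } from "@huggingface/tasks";

const model: ModelData = { id: "some-org/some-flux-checkpoint", tags: ["flux"] };

// Returns two Python snippets: the FluxPipeline setup (shift=1.0) and the shared
// generate_image() example with NUM_STEPS = 4 and CFG_WEIGHT = 0 for flux models.
const snippets = MODEL_LIBRARIES_UI_ELEMENTS.diffusionkit.snippets(model);
console.log(snippets.join("\n\n"));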
package/dist/src/model-libraries-snippets.d.ts
CHANGED
@@ -7,6 +7,7 @@ export declare const bertopic: (model: ModelData) => string[];
 export declare const bm25s: (model: ModelData) => string[];
 export declare const depth_anything_v2: (model: ModelData) => string[];
 export declare const diffusers: (model: ModelData) => string[];
+export declare const diffusionkit: (model: ModelData) => string[];
 export declare const cartesia_pytorch: (model: ModelData) => string[];
 export declare const cartesia_mlx: (model: ModelData) => string[];
 export declare const edsnlp: (model: ModelData) => string[];
package/dist/src/model-libraries-snippets.d.ts.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map)
package/dist/src/model-libraries.d.ts
CHANGED
@@ -176,6 +176,12 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         snippets: (model: ModelData) => string[];
         filter: true;
     };
+    diffusionkit: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        snippets: (model: ModelData) => string[];
+    };
     doctr: {
         prettyLabel: string;
         repoName: string;
@@ -434,6 +440,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         snippets: (model: ModelData) => string[];
         filter: true;
     };
+    "py-feat": {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        docsUrl: string;
+        filter: false;
+    };
     pythae: {
         prettyLabel: string;
         repoName: string;
@@ -648,6 +661,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
     };
 };
 export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
-export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
-export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
+export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
+export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
 //# sourceMappingURL=model-libraries.d.ts.map
package/dist/src/model-libraries.d.ts.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map)
package/dist/src/pipelines.d.ts
CHANGED
@@ -391,6 +391,16 @@ export declare const PIPELINE_DATA: {
         name: string;
         modality: "multimodal";
         color: "blue";
+        hideInDatasets: false;
+    };
+    "keypoint-detection": {
+        name: string;
+        subtasks: {
+            type: string;
+            name: string;
+        }[];
+        modality: "cv";
+        color: "red";
         hideInDatasets: true;
     };
     other: {
@@ -403,7 +413,7 @@ export declare const PIPELINE_DATA: {
 };
 export type PipelineType = keyof typeof PIPELINE_DATA;
 export type WidgetType = PipelineType | "conversational";
-export declare const PIPELINE_TYPES: ("other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "text2text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text")[];
+export declare const PIPELINE_TYPES: ("other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "text2text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text" | "keypoint-detection")[];
 export declare const SUBTASK_TYPES: string[];
-export declare const PIPELINE_TYPES_SET: Set<"other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "text2text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text">;
+export declare const PIPELINE_TYPES_SET: Set<"other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "text2text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text" | "keypoint-detection">;
 //# sourceMappingURL=pipelines.d.ts.map
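With "keypoint-detection" added to PIPELINE_TYPES and PIPELINE_TYPES_SET, string validation against the pipeline list now accepts the new task. A small sketch, assuming both symbols are re-exported from the package root:

// Sketch only: narrowing an arbitrary string to the updated PipelineType union.
import { PIPELINE_TYPES_SET, type PipelineType } from "@huggingface/tasks";

function asPipelineType(value: string): PipelineType | undefined {
  return PIPELINE_TYPES_SET.has(value as PipelineType) ? (value as PipelineType) : undefined;
}

asPipelineType("keypoint-detection"); // defined as of 0.11.13
asPipelineType("pose-estimation");    // undefined: subtasks are not pipeline types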
package/dist/src/pipelines.d.ts.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map)
package/dist/src/tasks/audio-classification/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface AudioClassificationInput {
     /**
-     * The input audio data
+     * The input audio data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the audio data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */
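This interface now narrows `inputs` to a base64-encoded string (raw binary payloads remain possible when no `parameters` are sent); the same change appears in the other task input interfaces below. A minimal sketch of a typed request under the new declaration, with a hypothetical file path:

// Sketch only: building an AudioClassificationInput with base64-encoded audio.
// Assumes the task types are re-exported from the @huggingface/tasks root.
import { readFileSync } from "node:fs";
import type { AudioClassificationInput } from "@huggingface/tasks";

const payload: AudioClassificationInput = {
  inputs: readFileSync("sample.flac").toString("base64"), // hypothetical file
  parameters: { top_k: 3 },
};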
package/dist/src/tasks/audio-classification/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map)
package/dist/src/tasks/automatic-speech-recognition/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface AutomaticSpeechRecognitionInput {
     /**
-     * The input audio data
+     * The input audio data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the audio data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */
package/dist/src/tasks/automatic-speech-recognition/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map)
package/dist/src/tasks/image-classification/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface ImageClassificationInput {
     /**
-     * The input image data
+     * The input image data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the image data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */
package/dist/src/tasks/image-classification/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map)
package/dist/src/tasks/image-segmentation/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface ImageSegmentationInput {
     /**
-     * The input image data
+     * The input image data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the image data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */
@@ -41,6 +42,9 @@ export interface ImageSegmentationParameters {
     threshold?: number;
     [property: string]: unknown;
 }
+/**
+ * Segmentation task to be performed, depending on model capabilities.
+ */
 export type ImageSegmentationSubtask = "instance" | "panoptic" | "semantic";
 export type ImageSegmentationOutput = ImageSegmentationOutputElement[];
 /**
@@ -50,15 +54,15 @@ export type ImageSegmentationOutput = ImageSegmentationOutputElement[];
  */
 export interface ImageSegmentationOutputElement {
     /**
-     * The label of the predicted segment
+     * The label of the predicted segment.
      */
     label: string;
     /**
-     * The corresponding mask as a black-and-white image
+     * The corresponding mask as a black-and-white image (base64-encoded).
      */
-    mask:
+    mask: string;
     /**
-     * The score or confidence
+     * The score or confidence degree the model has.
      */
     score?: number;
     [property: string]: unknown;
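The segmentation declarations now document ImageSegmentationSubtask and return each mask as a base64-encoded string. A sketch of a typed request selecting a subtask, with illustrative values:

// Sketch only: an image-segmentation request under the updated declarations.
import type { ImageSegmentationInput } from "@huggingface/tasks";

const request: ImageSegmentationInput = {
  inputs: "<base64-encoded image>", // placeholder payload
  parameters: {
    subtask: "semantic", // "instance" | "panoptic" | "semantic"
    threshold: 0.9,      // probability threshold used to filter predicted masks
  },
};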
package/dist/src/tasks/image-segmentation/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map)
package/dist/src/tasks/image-to-image/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface ImageToImageInput {
     /**
-     * The input image data
+     * The input image data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the image data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */
@@ -38,13 +39,13 @@ export interface ImageToImageParameters {
      */
     num_inference_steps?: number;
     /**
-     * The size in pixel of the output image
+     * The size in pixel of the output image.
      */
     target_size?: TargetSize;
     [property: string]: unknown;
 }
 /**
- * The size in pixel of the output image
+ * The size in pixel of the output image.
  */
 export interface TargetSize {
     height: number;
@@ -56,7 +57,7 @@ export interface TargetSize {
  */
 export interface ImageToImageOutput {
     /**
-     * The output image
+     * The output image returned as raw bytes in the payload.
      */
     image?: unknown;
     [property: string]: unknown;
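For image-to-image, the comments now spell out that the output image comes back as raw bytes and that target_size is given in pixels. A sketch of a typed request, with illustrative parameter values:

// Sketch only: an image-to-image request using the clarified TargetSize shape.
import type { ImageToImageInput } from "@huggingface/tasks";

const request: ImageToImageInput = {
  inputs: "<base64-encoded source image>", // placeholder payload
  parameters: {
    guidance_scale: 7.5,
    num_inference_steps: 25,
    target_size: { width: 768, height: 768 }, // output size in pixels
  },
};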
package/dist/src/tasks/image-to-image/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map)
package/dist/src/tasks/index.d.ts
CHANGED
@@ -18,7 +18,7 @@ export type * from "./table-question-answering/inference";
 export type { TextToImageInput, TextToImageOutput, TextToImageParameters } from "./text-to-image/inference";
 export type { TextToAudioParameters, TextToSpeechInput, TextToSpeechOutput } from "./text-to-speech/inference";
 export type * from "./token-classification/inference";
-export type {
+export type { TranslationInput, TranslationOutput } from "./translation/inference";
 export type { ClassificationOutputTransform, TextClassificationInput, TextClassificationOutput, TextClassificationOutputElement, TextClassificationParameters, } from "./text-classification/inference";
 export type { TextGenerationOutputFinishReason, TextGenerationOutputPrefillToken, TextGenerationInput, TextGenerationOutput, TextGenerationOutputDetails, TextGenerationInputGenerateParameters, TextGenerationOutputBestOfSequence, TextGenerationOutputToken, TextGenerationStreamOutputStreamDetails, TextGenerationStreamOutput, } from "./text-generation/inference";
 export type * from "./video-classification/inference";
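The task barrel switches from a wildcard re-export of the translation module to the two named types, so downstream code imports them explicitly. A sketch, assuming the package root forwards these types:

// Sketch only: importing the translation types by name after this change.
import type { TranslationInput, TranslationOutput } from "@huggingface/tasks";

const request: TranslationInput = { inputs: "Bonjour le monde" };
declare const response: TranslationOutput; // shape defined in translation/inference.d.ts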
package/dist/src/tasks/index.d.ts.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map)
package/dist/src/tasks/keypoint-detection/data.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/keypoint-detection/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cAyCf,CAAC;AAEF,eAAe,QAAQ,CAAC"}
package/dist/src/tasks/object-detection/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface ObjectDetectionInput {
     /**
-     * The input image data
+     * The input image data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the image data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */
@@ -34,9 +35,21 @@ export interface ObjectDetectionParameters {
  * image.
  */
 export interface BoundingBox {
+    /**
+     * The x-coordinate of the bottom-right corner of the bounding box.
+     */
     xmax: number;
+    /**
+     * The x-coordinate of the top-left corner of the bounding box.
+     */
     xmin: number;
+    /**
+     * The y-coordinate of the bottom-right corner of the bounding box.
+     */
     ymax: number;
+    /**
+     * The y-coordinate of the top-left corner of the bounding box.
+     */
     ymin: number;
     [property: string]: unknown;
 }
@@ -51,11 +64,11 @@ export interface ObjectDetectionOutputElement {
      */
     box: BoundingBox;
     /**
-     * The predicted label for the bounding box
+     * The predicted label for the bounding box.
      */
     label: string;
     /**
-     * The associated score / probability
+     * The associated score / probability.
      */
     score: number;
     [property: string]: unknown;
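The BoundingBox fields are now documented as the coordinates of the top-left corner (xmin, ymin) and bottom-right corner (xmax, ymax). A short sketch interpreting a prediction, with made-up values:

// Sketch only: reading an object-detection prediction with the documented box corners.
import type { ObjectDetectionOutputElement } from "@huggingface/tasks";

const detection: ObjectDetectionOutputElement = {
  label: "cat",
  score: 0.97,
  box: { xmin: 120, ymin: 80, xmax: 340, ymax: 260 },
};

const boxWidth = detection.box.xmax - detection.box.xmin;  // 220
const boxHeight = detection.box.ymax - detection.box.ymin; // 180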
package/dist/src/tasks/object-detection/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(regenerated single-line source map)