@huggingface/tasks 0.11.12 → 0.12.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +74 -2
- package/dist/index.js +74 -2
- package/dist/src/model-libraries-snippets.d.ts +1 -0
- package/dist/src/model-libraries-snippets.d.ts.map +1 -1
- package/dist/src/model-libraries.d.ts +15 -2
- package/dist/src/model-libraries.d.ts.map +1 -1
- package/dist/src/pipelines.d.ts +18 -2
- package/dist/src/pipelines.d.ts.map +1 -1
- package/dist/src/tasks/audio-classification/inference.d.ts +3 -2
- package/dist/src/tasks/audio-classification/inference.d.ts.map +1 -1
- package/dist/src/tasks/automatic-speech-recognition/inference.d.ts +3 -2
- package/dist/src/tasks/automatic-speech-recognition/inference.d.ts.map +1 -1
- package/dist/src/tasks/image-classification/inference.d.ts +3 -2
- package/dist/src/tasks/image-classification/inference.d.ts.map +1 -1
- package/dist/src/tasks/image-segmentation/inference.d.ts +10 -6
- package/dist/src/tasks/image-segmentation/inference.d.ts.map +1 -1
- package/dist/src/tasks/image-to-image/inference.d.ts +6 -5
- package/dist/src/tasks/image-to-image/inference.d.ts.map +1 -1
- package/dist/src/tasks/index.d.ts +1 -1
- package/dist/src/tasks/index.d.ts.map +1 -1
- package/dist/src/tasks/keypoint-detection/data.d.ts +4 -0
- package/dist/src/tasks/keypoint-detection/data.d.ts.map +1 -0
- package/dist/src/tasks/object-detection/inference.d.ts +17 -4
- package/dist/src/tasks/object-detection/inference.d.ts.map +1 -1
- package/dist/src/tasks/summarization/inference.d.ts +13 -12
- package/dist/src/tasks/summarization/inference.d.ts.map +1 -1
- package/dist/src/tasks/text-to-image/inference.d.ts +11 -7
- package/dist/src/tasks/text-to-image/inference.d.ts.map +1 -1
- package/dist/src/tasks/translation/inference.d.ts +21 -10
- package/dist/src/tasks/translation/inference.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/model-libraries-snippets.ts +42 -0
- package/src/model-libraries.ts +13 -0
- package/src/pipelines.ts +18 -0
- package/src/tasks/audio-classification/inference.ts +3 -2
- package/src/tasks/audio-classification/spec/input.json +2 -1
- package/src/tasks/audio-classification/spec/output.json +1 -0
- package/src/tasks/automatic-speech-recognition/inference.ts +3 -2
- package/src/tasks/automatic-speech-recognition/spec/input.json +2 -1
- package/src/tasks/common-definitions.json +3 -20
- package/src/tasks/image-classification/inference.ts +3 -2
- package/src/tasks/image-classification/spec/input.json +2 -1
- package/src/tasks/image-classification/spec/output.json +1 -0
- package/src/tasks/image-segmentation/inference.ts +10 -6
- package/src/tasks/image-segmentation/spec/input.json +3 -12
- package/src/tasks/image-segmentation/spec/output.json +4 -3
- package/src/tasks/image-to-image/about.md +70 -21
- package/src/tasks/image-to-image/data.ts +1 -1
- package/src/tasks/image-to-image/inference.ts +6 -5
- package/src/tasks/image-to-image/spec/input.json +3 -2
- package/src/tasks/image-to-image/spec/output.json +1 -1
- package/src/tasks/index.ts +5 -6
- package/src/tasks/keypoint-detection/about.md +59 -0
- package/src/tasks/keypoint-detection/data.ts +46 -0
- package/src/tasks/object-detection/inference.ts +17 -4
- package/src/tasks/object-detection/spec/input.json +2 -1
- package/src/tasks/object-detection/spec/output.json +10 -6
- package/src/tasks/summarization/inference.ts +13 -12
- package/src/tasks/summarization/spec/input.json +37 -2
- package/src/tasks/text-classification/spec/output.json +1 -0
- package/src/tasks/text-to-image/inference.ts +11 -7
- package/src/tasks/text-to-image/spec/input.json +8 -4
- package/src/tasks/text-to-image/spec/output.json +1 -1
- package/src/tasks/translation/inference.ts +21 -10
- package/src/tasks/translation/spec/input.json +45 -2
- package/src/tasks/zero-shot-classification/spec/output.json +1 -0
package/dist/index.cjs
CHANGED
@@ -1392,6 +1392,24 @@ var PIPELINE_DATA = {
     name: "Video-Text-to-Text",
     modality: "multimodal",
     color: "blue",
+    hideInDatasets: false
+  },
+  "keypoint-detection": {
+    name: "Keypoint Detection",
+    subtasks: [
+      {
+        type: "pose-estimation",
+        name: "Pose Estimation"
+      }
+    ],
+    modality: "cv",
+    color: "red",
+    hideInDatasets: true
+  },
+  "any-to-any": {
+    name: "Any-to-Any",
+    modality: "multimodal",
+    color: "yellow",
     hideInDatasets: true
   },
   other: {
@@ -2057,7 +2075,7 @@ var taskData9 = {
       id: "timbrooks/instruct-pix2pix"
     }
   ],
-  summary: "Image-to-image is the task of transforming
+  summary: "Image-to-image is the task of transforming an input image through a variety of possible manipulations and enhancements, such as super-resolution, image inpainting, colorization, and more.",
   widgetModels: ["lllyasviel/sd-controlnet-canny"],
   youtubeId: ""
 };
@@ -4209,6 +4227,7 @@ var TASKS_MODEL_LIBRARIES = {
   "image-to-image": ["diffusers", "transformers", "transformers.js"],
   "image-to-text": ["transformers", "transformers.js"],
   "image-to-video": ["diffusers"],
+  "keypoint-detection": ["transformers"],
   "video-classification": ["transformers"],
   "mask-generation": ["transformers"],
   "multiple-choice": ["transformers"],
@@ -4251,7 +4270,8 @@ var TASKS_MODEL_LIBRARIES = {
   "zero-shot-image-classification": ["transformers", "transformers.js"],
   "zero-shot-object-detection": ["transformers", "transformers.js"],
   "text-to-3d": ["diffusers"],
-  "image-to-3d": ["diffusers"]
+  "image-to-3d": ["diffusers"],
+  "any-to-any": ["transformers"]
 };
 function getData(type, partialTaskData = data_default16) {
   return {
@@ -4262,6 +4282,7 @@ function getData(type, partialTaskData = data_default16) {
   };
 }
 var TASKS_DATA = {
+  "any-to-any": getData("any-to-any", data_default16),
   "audio-classification": getData("audio-classification", data_default),
   "audio-to-audio": getData("audio-to-audio", data_default2),
   "automatic-speech-recognition": getData("automatic-speech-recognition", data_default3),
@@ -4277,6 +4298,7 @@ var TASKS_DATA = {
   "image-text-to-text": getData("image-text-to-text", data_default11),
   "image-to-text": getData("image-to-text", data_default10),
   "image-to-video": void 0,
+  "keypoint-detection": getData("keypoint-detection", data_default16),
   "mask-generation": getData("mask-generation", data_default13),
   "multiple-choice": void 0,
   "object-detection": getData("object-detection", data_default14),
@@ -4461,6 +4483,43 @@ var diffusers = (model) => {
     return diffusers_default(model);
   }
 };
+var diffusionkit = (model) => {
+  const sd3Snippet = `# Pipeline for Stable Diffusion 3
+from diffusionkit.mlx import DiffusionPipeline
+
+pipeline = DiffusionPipeline(
+  shift=3.0,
+  use_t5=False,
+  model_version=${model.id},
+  low_memory_mode=True,
+  a16=True,
+  w16=True,
+)`;
+  const fluxSnippet = `# Pipeline for Flux
+from diffusionkit.mlx import FluxPipeline
+
+pipeline = FluxPipeline(
+  shift=1.0,
+  model_version=${model.id},
+  low_memory_mode=True,
+  a16=True,
+  w16=True,
+)`;
+  const generateSnippet = `# Image Generation
+HEIGHT = 512
+WIDTH = 512
+NUM_STEPS = ${model.tags.includes("flux") ? 4 : 50}
+CFG_WEIGHT = ${model.tags.includes("flux") ? 0 : 5}
+
+image, _ = pipeline.generate_image(
+  "a photo of a cat",
+  cfg_weight=CFG_WEIGHT,
+  num_steps=NUM_STEPS,
+  latent_size=(HEIGHT // 8, WIDTH // 8),
+)`;
+  const pipelineSnippet = model.tags.includes("flux") ? fluxSnippet : sd3Snippet;
+  return [pipelineSnippet, generateSnippet];
+};
 var cartesia_pytorch = (model) => [
   `# pip install --no-binary :all: cartesia-pytorch
 from cartesia_pytorch import ReneLMHeadModel
@@ -5279,6 +5338,12 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
     filter: true
     /// diffusers has its own more complex "countDownloads" query
   },
+  diffusionkit: {
+    prettyLabel: "DiffusionKit",
+    repoName: "DiffusionKit",
+    repoUrl: "https://github.com/argmaxinc/DiffusionKit",
+    snippets: diffusionkit
+  },
   doctr: {
     prettyLabel: "docTR",
     repoName: "doctr",
@@ -5538,6 +5603,13 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
     snippets: pyannote_audio,
     filter: true
   },
+  "py-feat": {
+    prettyLabel: "Py-Feat",
+    repoName: "Py-Feat",
+    repoUrl: "https://github.com/cosanlab/py-feat",
+    docsUrl: "https://py-feat.org/",
+    filter: false
+  },
   pythae: {
     prettyLabel: "pythae",
     repoName: "pythae",
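
The hunks above register two new pipeline types, keypoint-detection (with a pose-estimation subtask) and any-to-any, and wire them into TASKS_MODEL_LIBRARIES and TASKS_DATA. A minimal consumer-side sketch, assuming PIPELINE_DATA and TASKS_DATA stay re-exported from the package root as in earlier releases; the logging is illustrative only:

```ts
import { PIPELINE_DATA, TASKS_DATA } from "@huggingface/tasks";

// Display metadata for the newly registered pipeline type.
const keypoint = PIPELINE_DATA["keypoint-detection"];
console.log(keypoint.name);                          // "Keypoint Detection"
console.log(keypoint.subtasks?.map((s) => s.type));  // ["pose-estimation"]

// Task pages are only populated for pipelines that ship task data;
// per this release, keypoint-detection maps to the transformers library.
const task = TASKS_DATA["keypoint-detection"];
if (task) {
  console.log(task.label, task.libraries);
}
```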
package/dist/index.js
CHANGED
@@ -1354,6 +1354,24 @@ var PIPELINE_DATA = {
     name: "Video-Text-to-Text",
     modality: "multimodal",
     color: "blue",
+    hideInDatasets: false
+  },
+  "keypoint-detection": {
+    name: "Keypoint Detection",
+    subtasks: [
+      {
+        type: "pose-estimation",
+        name: "Pose Estimation"
+      }
+    ],
+    modality: "cv",
+    color: "red",
+    hideInDatasets: true
+  },
+  "any-to-any": {
+    name: "Any-to-Any",
+    modality: "multimodal",
+    color: "yellow",
     hideInDatasets: true
   },
   other: {
@@ -2019,7 +2037,7 @@ var taskData9 = {
       id: "timbrooks/instruct-pix2pix"
     }
   ],
-  summary: "Image-to-image is the task of transforming
+  summary: "Image-to-image is the task of transforming an input image through a variety of possible manipulations and enhancements, such as super-resolution, image inpainting, colorization, and more.",
   widgetModels: ["lllyasviel/sd-controlnet-canny"],
   youtubeId: ""
 };
@@ -4171,6 +4189,7 @@ var TASKS_MODEL_LIBRARIES = {
   "image-to-image": ["diffusers", "transformers", "transformers.js"],
   "image-to-text": ["transformers", "transformers.js"],
   "image-to-video": ["diffusers"],
+  "keypoint-detection": ["transformers"],
   "video-classification": ["transformers"],
   "mask-generation": ["transformers"],
   "multiple-choice": ["transformers"],
@@ -4213,7 +4232,8 @@ var TASKS_MODEL_LIBRARIES = {
   "zero-shot-image-classification": ["transformers", "transformers.js"],
   "zero-shot-object-detection": ["transformers", "transformers.js"],
   "text-to-3d": ["diffusers"],
-  "image-to-3d": ["diffusers"]
+  "image-to-3d": ["diffusers"],
+  "any-to-any": ["transformers"]
 };
 function getData(type, partialTaskData = data_default16) {
   return {
@@ -4224,6 +4244,7 @@ function getData(type, partialTaskData = data_default16) {
   };
 }
 var TASKS_DATA = {
+  "any-to-any": getData("any-to-any", data_default16),
   "audio-classification": getData("audio-classification", data_default),
   "audio-to-audio": getData("audio-to-audio", data_default2),
   "automatic-speech-recognition": getData("automatic-speech-recognition", data_default3),
@@ -4239,6 +4260,7 @@ var TASKS_DATA = {
   "image-text-to-text": getData("image-text-to-text", data_default11),
   "image-to-text": getData("image-to-text", data_default10),
   "image-to-video": void 0,
+  "keypoint-detection": getData("keypoint-detection", data_default16),
   "mask-generation": getData("mask-generation", data_default13),
   "multiple-choice": void 0,
   "object-detection": getData("object-detection", data_default14),
@@ -4423,6 +4445,43 @@ var diffusers = (model) => {
     return diffusers_default(model);
   }
 };
+var diffusionkit = (model) => {
+  const sd3Snippet = `# Pipeline for Stable Diffusion 3
+from diffusionkit.mlx import DiffusionPipeline
+
+pipeline = DiffusionPipeline(
+  shift=3.0,
+  use_t5=False,
+  model_version=${model.id},
+  low_memory_mode=True,
+  a16=True,
+  w16=True,
+)`;
+  const fluxSnippet = `# Pipeline for Flux
+from diffusionkit.mlx import FluxPipeline
+
+pipeline = FluxPipeline(
+  shift=1.0,
+  model_version=${model.id},
+  low_memory_mode=True,
+  a16=True,
+  w16=True,
+)`;
+  const generateSnippet = `# Image Generation
+HEIGHT = 512
+WIDTH = 512
+NUM_STEPS = ${model.tags.includes("flux") ? 4 : 50}
+CFG_WEIGHT = ${model.tags.includes("flux") ? 0 : 5}
+
+image, _ = pipeline.generate_image(
+  "a photo of a cat",
+  cfg_weight=CFG_WEIGHT,
+  num_steps=NUM_STEPS,
+  latent_size=(HEIGHT // 8, WIDTH // 8),
+)`;
+  const pipelineSnippet = model.tags.includes("flux") ? fluxSnippet : sd3Snippet;
+  return [pipelineSnippet, generateSnippet];
+};
 var cartesia_pytorch = (model) => [
   `# pip install --no-binary :all: cartesia-pytorch
 from cartesia_pytorch import ReneLMHeadModel
@@ -5241,6 +5300,12 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
     filter: true
     /// diffusers has its own more complex "countDownloads" query
   },
+  diffusionkit: {
+    prettyLabel: "DiffusionKit",
+    repoName: "DiffusionKit",
+    repoUrl: "https://github.com/argmaxinc/DiffusionKit",
+    snippets: diffusionkit
+  },
   doctr: {
     prettyLabel: "docTR",
     repoName: "doctr",
@@ -5500,6 +5565,13 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
     snippets: pyannote_audio,
     filter: true
   },
+  "py-feat": {
+    prettyLabel: "Py-Feat",
+    repoName: "Py-Feat",
+    repoUrl: "https://github.com/cosanlab/py-feat",
+    docsUrl: "https://py-feat.org/",
+    filter: false
+  },
   pythae: {
     prettyLabel: "pythae",
     repoName: "pythae",
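
The diffusionkit snippet builder (mirrored here from index.cjs) returns two Python snippets, pipeline setup plus image generation, and switches to the Flux variant when the model carries a "flux" tag. A sketch of how the registry entry might be consumed, assuming MODEL_LIBRARIES_UI_ELEMENTS and the ModelData type keep their exported shapes; the model object below is a hypothetical, minimal stand-in for a real Hub payload:

```ts
import { MODEL_LIBRARIES_UI_ELEMENTS, type ModelData } from "@huggingface/tasks";

// Hypothetical minimal model payload; a real ModelData comes from the Hub API.
const model = { id: "some-org/some-flux-model", tags: ["flux", "mlx"] } as ModelData;

const entry = MODEL_LIBRARIES_UI_ELEMENTS.diffusionkit;
console.log(entry.prettyLabel); // "DiffusionKit"
console.log(entry.repoUrl);     // "https://github.com/argmaxinc/DiffusionKit"

// Because the model is tagged "flux", the first snippet uses FluxPipeline
// and the generation snippet defaults to NUM_STEPS = 4, CFG_WEIGHT = 0.
for (const snippet of entry.snippets(model)) {
  console.log(snippet);
}
```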

package/dist/src/model-libraries-snippets.d.ts
CHANGED
@@ -7,6 +7,7 @@ export declare const bertopic: (model: ModelData) => string[];
 export declare const bm25s: (model: ModelData) => string[];
 export declare const depth_anything_v2: (model: ModelData) => string[];
 export declare const diffusers: (model: ModelData) => string[];
+export declare const diffusionkit: (model: ModelData) => string[];
 export declare const cartesia_pytorch: (model: ModelData) => string[];
 export declare const cartesia_mlx: (model: ModelData) => string[];
 export declare const edsnlp: (model: ModelData) => string[];

package/dist/src/model-libraries-snippets.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated)

package/dist/src/model-libraries.d.ts
CHANGED
@@ -176,6 +176,12 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         snippets: (model: ModelData) => string[];
         filter: true;
     };
+    diffusionkit: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        snippets: (model: ModelData) => string[];
+    };
     doctr: {
         prettyLabel: string;
         repoName: string;
@@ -434,6 +440,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         snippets: (model: ModelData) => string[];
         filter: true;
     };
+    "py-feat": {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        docsUrl: string;
+        filter: false;
+    };
     pythae: {
         prettyLabel: string;
         repoName: string;
@@ -648,6 +661,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
     };
 };
 export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
-export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
-export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
+export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
+export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
 //# sourceMappingURL=model-libraries.d.ts.map

package/dist/src/model-libraries.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated)
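
On the type side, ModelLibraryKey now includes "diffusionkit" and "py-feat", and both ALL_MODEL_LIBRARY_KEYS arrays grow accordingly. A small sketch of narrowing an arbitrary string to a library key before looking up its UI metadata, assuming these exports keep the shapes declared above and remain available from the package root:

```ts
import {
  ALL_MODEL_LIBRARY_KEYS,
  MODEL_LIBRARIES_UI_ELEMENTS,
  type ModelLibraryKey,
} from "@huggingface/tasks";

// Type guard built on the exported key list.
function isModelLibraryKey(value: string): value is ModelLibraryKey {
  return (ALL_MODEL_LIBRARY_KEYS as readonly string[]).includes(value);
}

const tag = "py-feat";
if (isModelLibraryKey(tag)) {
  const ui = MODEL_LIBRARIES_UI_ELEMENTS[tag];
  console.log(ui.prettyLabel, ui.repoUrl); // "Py-Feat" "https://github.com/cosanlab/py-feat"
}
```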
package/dist/src/pipelines.d.ts
CHANGED
@@ -391,6 +391,22 @@ export declare const PIPELINE_DATA: {
         name: string;
         modality: "multimodal";
         color: "blue";
+        hideInDatasets: false;
+    };
+    "keypoint-detection": {
+        name: string;
+        subtasks: {
+            type: string;
+            name: string;
+        }[];
+        modality: "cv";
+        color: "red";
+        hideInDatasets: true;
+    };
+    "any-to-any": {
+        name: string;
+        modality: "multimodal";
+        color: "yellow";
         hideInDatasets: true;
     };
     other: {
@@ -403,7 +419,7 @@ export declare const PIPELINE_DATA: {
 };
 export type PipelineType = keyof typeof PIPELINE_DATA;
 export type WidgetType = PipelineType | "conversational";
-export declare const PIPELINE_TYPES: ("other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "text2text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text")[];
+export declare const PIPELINE_TYPES: ("other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "text2text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text" | "keypoint-detection" | "any-to-any")[];
 export declare const SUBTASK_TYPES: string[];
-export declare const PIPELINE_TYPES_SET: Set<"other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "text2text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text">;
+export declare const PIPELINE_TYPES_SET: Set<"other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "text2text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text" | "keypoint-detection" | "any-to-any">;
 //# sourceMappingURL=pipelines.d.ts.map

package/dist/src/pipelines.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated)
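
PIPELINE_TYPES and PIPELINE_TYPES_SET now include "keypoint-detection" and "any-to-any", which makes the set convenient for validating pipeline tags coming from user input. A sketch under the assumption that PIPELINE_TYPES_SET keeps its Set<PipelineType> shape and root export:

```ts
import { PIPELINE_TYPES_SET, type PipelineType } from "@huggingface/tasks";

// Narrow an arbitrary tag string to a known pipeline type, or reject it.
function asPipelineType(tag: string): PipelineType | undefined {
  return PIPELINE_TYPES_SET.has(tag as PipelineType) ? (tag as PipelineType) : undefined;
}

console.log(asPipelineType("keypoint-detection")); // "keypoint-detection"
console.log(asPipelineType("pose-estimation"));    // undefined — a subtask, not a pipeline type
```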

package/dist/src/tasks/audio-classification/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface AudioClassificationInput {
     /**
-     * The input audio data
+     * The input audio data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the audio data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */

package/dist/src/tasks/audio-classification/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated)
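
The audio-classification input is now typed as a base64-encoded string (raw binary payloads remain possible when no parameters are sent). A sketch of building such a typed payload in Node, assuming the task input types stay re-exported from the package root; the file path and top_k value are illustrative, and the HTTP call itself is out of scope for this package:

```ts
import { readFile } from "node:fs/promises";
import type { AudioClassificationInput } from "@huggingface/tasks";

// Encode a local audio file and wrap it in the typed request shape.
async function buildAudioClassificationPayload(path: string): Promise<AudioClassificationInput> {
  const audio = await readFile(path);
  return {
    inputs: audio.toString("base64"),
    parameters: { top_k: 3 },
  };
}
```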

package/dist/src/tasks/automatic-speech-recognition/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface AutomaticSpeechRecognitionInput {
     /**
-     * The input audio data
+     * The input audio data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the audio data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */

package/dist/src/tasks/automatic-speech-recognition/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated)

package/dist/src/tasks/image-classification/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface ImageClassificationInput {
     /**
-     * The input image data
+     * The input image data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the image data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */

package/dist/src/tasks/image-classification/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated)

package/dist/src/tasks/image-segmentation/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface ImageSegmentationInput {
     /**
-     * The input image data
+     * The input image data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the image data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */
@@ -41,6 +42,9 @@ export interface ImageSegmentationParameters {
     threshold?: number;
     [property: string]: unknown;
 }
+/**
+ * Segmentation task to be performed, depending on model capabilities.
+ */
 export type ImageSegmentationSubtask = "instance" | "panoptic" | "semantic";
 export type ImageSegmentationOutput = ImageSegmentationOutputElement[];
 /**
@@ -50,15 +54,15 @@ export type ImageSegmentationOutput = ImageSegmentationOutputElement[];
  */
 export interface ImageSegmentationOutputElement {
     /**
-     * The label of the predicted segment
+     * The label of the predicted segment.
      */
     label: string;
     /**
-     * The corresponding mask as a black-and-white image
+     * The corresponding mask as a black-and-white image (base64-encoded).
      */
-    mask:
+    mask: string;
     /**
-     * The score or confidence
+     * The score or confidence degree the model has.
      */
     score?: number;
     [property: string]: unknown;

package/dist/src/tasks/image-segmentation/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated)
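
Image segmentation gains a documented subtask parameter, and the returned mask is now typed as a base64-encoded black-and-white image. A sketch of the typed request and of decoding a returned mask, with the inference call itself omitted and the types assumed to stay re-exported from the package root:

```ts
import { Buffer } from "node:buffer";
import type { ImageSegmentationInput, ImageSegmentationOutput } from "@huggingface/tasks";

// Request a semantic segmentation for a base64-encoded image.
function buildSemanticSegmentationRequest(imageAsBase64: string): ImageSegmentationInput {
  return {
    inputs: imageAsBase64,
    parameters: { subtask: "semantic", threshold: 0.5 },
  };
}

// Decode the base64 mask of the first returned segment into raw image bytes.
function firstMaskBytes(output: ImageSegmentationOutput): Buffer | undefined {
  const first = output[0];
  return first ? Buffer.from(first.mask, "base64") : undefined;
}
```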

package/dist/src/tasks/image-to-image/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface ImageToImageInput {
     /**
-     * The input image data
+     * The input image data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the image data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */
@@ -38,13 +39,13 @@ export interface ImageToImageParameters {
      */
     num_inference_steps?: number;
     /**
-     * The size in pixel of the output image
+     * The size in pixel of the output image.
      */
     target_size?: TargetSize;
     [property: string]: unknown;
 }
 /**
- * The size in pixel of the output image
+ * The size in pixel of the output image.
  */
 export interface TargetSize {
     height: number;
@@ -56,7 +57,7 @@ export interface TargetSize {
  */
 export interface ImageToImageOutput {
     /**
-     * The output image
+     * The output image returned as raw bytes in the payload.
     */
     image?: unknown;
     [property: string]: unknown;

package/dist/src/tasks/image-to-image/inference.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated)
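
Image-to-image follows the same convention: the input is a base64-encoded string and the output image comes back as raw bytes in the response payload. A sketch of the typed request using only the parameters visible in the hunks above (num_inference_steps and target_size), again assuming root re-exports:

```ts
import type { ImageToImageInput } from "@huggingface/tasks";

// Upscale-style request: 25 denoising steps, 1024×1024 output.
function buildImageToImageRequest(imageAsBase64: string): ImageToImageInput {
  return {
    inputs: imageAsBase64,
    parameters: {
      num_inference_steps: 25,
      target_size: { width: 1024, height: 1024 },
    },
  };
}
```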

package/dist/src/tasks/index.d.ts
CHANGED
@@ -18,7 +18,7 @@ export type * from "./table-question-answering/inference";
 export type { TextToImageInput, TextToImageOutput, TextToImageParameters } from "./text-to-image/inference";
 export type { TextToAudioParameters, TextToSpeechInput, TextToSpeechOutput } from "./text-to-speech/inference";
 export type * from "./token-classification/inference";
-export type {
+export type { TranslationInput, TranslationOutput } from "./translation/inference";
 export type { ClassificationOutputTransform, TextClassificationInput, TextClassificationOutput, TextClassificationOutputElement, TextClassificationParameters, } from "./text-classification/inference";
 export type { TextGenerationOutputFinishReason, TextGenerationOutputPrefillToken, TextGenerationInput, TextGenerationOutput, TextGenerationOutputDetails, TextGenerationInputGenerateParameters, TextGenerationOutputBestOfSequence, TextGenerationOutputToken, TextGenerationStreamOutputStreamDetails, TextGenerationStreamOutput, } from "./text-generation/inference";
 export type * from "./video-classification/inference";

package/dist/src/tasks/index.d.ts.map
CHANGED
@@ -1 +1 @@
(source map regenerated)

package/dist/src/tasks/keypoint-detection/data.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/keypoint-detection/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cAyCf,CAAC;AAEF,eAAe,QAAQ,CAAC"}