@huggingface/tasks 0.11.1 → 0.11.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -4,7 +4,7 @@ This package contains the definition files (written in Typescript) for the huggi
4
4
 
5
5
  - **pipeline types** (a.k.a. **task types**) - used to determine which widget to display on the model page, and which inference API to run.
6
6
  - **default widget inputs** - when they aren't provided in the model card.
7
- - definitions and UI elements for **model libraries** (and soon for **dataset libraries**).
7
+ - definitions and UI elements for **model and dataset libraries**.
8
8
 
9
9
  Please add any missing ones to these definitions by opening a PR. Thanks 🔥
10
10
 
package/dist/index.cjs CHANGED
@@ -4034,6 +4034,135 @@ var taskData36 = {
4034
4034
  };
4035
4035
  var data_default36 = taskData36;
4036
4036
 
4037
+ // src/tasks/image-to-3d/data.ts
4038
+ var taskData37 = {
4039
+ datasets: [
4040
+ {
4041
+ description: "A large dataset of over 10 million 3D objects.",
4042
+ id: "allenai/objaverse-xl"
4043
+ },
4044
+ {
4045
+ description: "A dataset of isolated object images for evaluating image-to-3D models.",
4046
+ id: "dylanebert/iso3d"
4047
+ }
4048
+ ],
4049
+ demo: {
4050
+ inputs: [
4051
+ {
4052
+ filename: "image-to-3d-image-input.png",
4053
+ type: "img"
4054
+ }
4055
+ ],
4056
+ outputs: [
4057
+ {
4058
+ label: "Result",
4059
+ content: "image-to-3d-3d-output-filename.glb",
4060
+ type: "text"
4061
+ }
4062
+ ]
4063
+ },
4064
+ metrics: [],
4065
+ models: [
4066
+ {
4067
+ description: "Fast image-to-3D mesh model by Tencent.",
4068
+ id: "TencentARC/InstantMesh"
4069
+ },
4070
+ {
4071
+ description: "Fast image-to-3D mesh model by StabilityAI.",
4072
+ id: "stabilityai/TripoSR"
4073
+ },
4074
+ {
4075
+ description: "A scaled up image-to-3D mesh model derived from TripoSR.",
4076
+ id: "hwjiang/Real3D"
4077
+ },
4078
+ {
4079
+ description: "Generative 3D gaussian splatting model.",
4080
+ id: "ashawkey/LGM"
4081
+ }
4082
+ ],
4083
+ spaces: [
4084
+ {
4085
+ description: "Leaderboard to evaluate image-to-3D models.",
4086
+ id: "dylanebert/3d-arena"
4087
+ },
4088
+ {
4089
+ description: "Image-to-3D demo with mesh outputs.",
4090
+ id: "TencentARC/InstantMesh"
4091
+ },
4092
+ {
4093
+ description: "Image-to-3D demo with mesh outputs.",
4094
+ id: "stabilityai/TripoSR"
4095
+ },
4096
+ {
4097
+ description: "Image-to-3D demo with mesh outputs.",
4098
+ id: "hwjiang/Real3D"
4099
+ },
4100
+ {
4101
+ description: "Image-to-3D demo with splat outputs.",
4102
+ id: "dylanebert/LGM-mini"
4103
+ }
4104
+ ],
4105
+ summary: "Image-to-3D models take in image input and produce 3D output.",
4106
+ widgetModels: [],
4107
+ youtubeId: ""
4108
+ };
4109
+ var data_default37 = taskData37;
4110
+
4111
+ // src/tasks/text-to-3d/data.ts
4112
+ var taskData38 = {
4113
+ datasets: [
4114
+ {
4115
+ description: "A large dataset of over 10 million 3D objects.",
4116
+ id: "allenai/objaverse-xl"
4117
+ },
4118
+ {
4119
+ description: "Descriptive captions for 3D objects in Objaverse.",
4120
+ id: "tiange/Cap3D"
4121
+ }
4122
+ ],
4123
+ demo: {
4124
+ inputs: [
4125
+ {
4126
+ label: "Prompt",
4127
+ content: "a cat statue",
4128
+ type: "text"
4129
+ }
4130
+ ],
4131
+ outputs: [
4132
+ {
4133
+ label: "Result",
4134
+ content: "text-to-3d-3d-output-filename.glb",
4135
+ type: "text"
4136
+ }
4137
+ ]
4138
+ },
4139
+ metrics: [],
4140
+ models: [
4141
+ {
4142
+ description: "Text-to-3D mesh model by OpenAI",
4143
+ id: "openai/shap-e"
4144
+ },
4145
+ {
4146
+ description: "Generative 3D gaussian splatting model.",
4147
+ id: "ashawkey/LGM"
4148
+ }
4149
+ ],
4150
+ spaces: [
4151
+ {
4152
+ description: "Text-to-3D demo with mesh outputs.",
4153
+ id: "hysts/Shap-E"
4154
+ },
4155
+ {
4156
+ description: "Text/image-to-3D demo with splat outputs.",
4157
+ id: "ashawkey/LGM"
4158
+ }
4159
+ ],
4160
+ summary: "Text-to-3D models take in text input and produce 3D output.",
4161
+ widgetModels: [],
4162
+ youtubeId: ""
4163
+ };
4164
+ var data_default38 = taskData38;
4165
+
4037
4166
  // src/tasks/index.ts
4038
4167
  var TASKS_MODEL_LIBRARIES = {
4039
4168
  "audio-classification": ["speechbrain", "transformers", "transformers.js"],
@@ -4091,8 +4220,8 @@ var TASKS_MODEL_LIBRARIES = {
4091
4220
  "zero-shot-classification": ["transformers", "transformers.js"],
4092
4221
  "zero-shot-image-classification": ["transformers", "transformers.js"],
4093
4222
  "zero-shot-object-detection": ["transformers", "transformers.js"],
4094
- "text-to-3d": [],
4095
- "image-to-3d": []
4223
+ "text-to-3d": ["diffusers"],
4224
+ "image-to-3d": ["diffusers"]
4096
4225
  };
4097
4226
  function getData(type, partialTaskData = data_default16) {
4098
4227
  return {
@@ -4150,8 +4279,8 @@ var TASKS_DATA = {
4150
4279
  "zero-shot-classification": getData("zero-shot-classification", data_default34),
4151
4280
  "zero-shot-image-classification": getData("zero-shot-image-classification", data_default35),
4152
4281
  "zero-shot-object-detection": getData("zero-shot-object-detection", data_default36),
4153
- "text-to-3d": getData("text-to-3d", data_default16),
4154
- "image-to-3d": getData("image-to-3d", data_default16)
4282
+ "text-to-3d": getData("text-to-3d", data_default38),
4283
+ "image-to-3d": getData("image-to-3d", data_default37)
4155
4284
  };
4156
4285
 
4157
4286
  // src/model-libraries-snippets.ts
@@ -4222,6 +4351,49 @@ var bm25s = (model) => [
4222
4351
 
4223
4352
  retriever = BM25HF.load_from_hub("${model.id}")`
4224
4353
  ];
4354
+ var depth_anything_v2 = (model) => {
4355
+ let encoder;
4356
+ let features;
4357
+ let out_channels;
4358
+ encoder = "<ENCODER>";
4359
+ features = "<NUMBER_OF_FEATURES>";
4360
+ out_channels = "<OUT_CHANNELS>";
4361
+ if (model.id === "depth-anything/Depth-Anything-V2-Small") {
4362
+ encoder = "vits";
4363
+ features = "64";
4364
+ out_channels = "[48, 96, 192, 384]";
4365
+ } else if (model.id === "depth-anything/Depth-Anything-V2-Base") {
4366
+ encoder = "vitb";
4367
+ features = "128";
4368
+ out_channels = "[96, 192, 384, 768]";
4369
+ } else if (model.id === "depth-anything/Depth-Anything-V2-Large") {
4370
+ encoder = "vitl";
4371
+ features = "256";
4372
+ out_channels = "[256, 512, 1024, 1024]";
4373
+ }
4374
+ return [
4375
+ `
4376
+ # Install from https://github.com/DepthAnything/Depth-Anything-V2
4377
+
4378
+ # Load the model and infer depth from an image
4379
+ import cv2
4380
+ import torch
4381
+
4382
+ from depth_anything_v2.dpt import DepthAnythingV2
+ from huggingface_hub import hf_hub_download
4383
+
4384
+ # instantiate the model
4385
+ model = DepthAnythingV2(encoder="${encoder}", features=${features}, out_channels=${out_channels})
4386
+
4387
+ # load the weights
4388
+ filepath = hf_hub_download(repo_id="${model.id}", filename="depth_anything_v2_${encoder}.pth", repo_type="model")
4389
+ state_dict = torch.load(filepath, map_location="cpu")
4390
+ model.load_state_dict(state_dict)
+ model.eval()
4391
+
4392
+ raw_img = cv2.imread("your/image/path")
4393
+ depth = model.infer_image(raw_img) # HxW raw depth map in numpy
4394
+ `
4395
+ ];
4396
+ };
4225
4397
  var diffusers_default = (model) => [
4226
4398
  `from diffusers import DiffusionPipeline
4227
4399
 
@@ -4347,6 +4519,11 @@ from huggingface_hub import from_pretrained_keras
4347
4519
  model = from_pretrained_keras("${model.id}")
4348
4520
  `
4349
4521
  ];
4522
+ var mamba_ssm = (model) => [
4523
+ `from mamba_ssm import MambaLMHeadModel
4524
+
4525
+ model = MambaLMHeadModel.from_pretrained("${model.id}")`
4526
+ ];
4350
4527
  var mars5_tts = (model) => [
4351
4528
  `# Install from https://github.com/Camb-ai/MARS5-TTS
4352
4529
 
@@ -4911,6 +5088,14 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
4911
5088
  filter: false,
4912
5089
  countDownloads: `path:"adapter_config.json"`
4913
5090
  },
5091
+ "depth-anything-v2": {
5092
+ prettyLabel: "DepthAnythingV2",
5093
+ repoName: "Depth Anything V2",
5094
+ repoUrl: "https://github.com/DepthAnything/Depth-Anything-V2",
5095
+ snippets: depth_anything_v2,
5096
+ filter: false,
5097
+ countDownloads: `path_extension:"pth"`
5098
+ },
4914
5099
  diffusers: {
4915
5100
  prettyLabel: "Diffusers",
4916
5101
  repoName: "\u{1F917}/diffusers",
@@ -5053,11 +5238,25 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
5053
5238
  repoName: "k2",
5054
5239
  repoUrl: "https://github.com/k2-fsa/k2"
5055
5240
  },
5241
+ liveportrait: {
5242
+ prettyLabel: "LivePortrait",
5243
+ repoName: "LivePortrait",
5244
+ repoUrl: "https://github.com/KwaiVGI/LivePortrait",
5245
+ filter: false,
5246
+ countDownloads: `path:"liveportrait/landmark.onnx"`
5247
+ },
5056
5248
  mindspore: {
5057
5249
  prettyLabel: "MindSpore",
5058
5250
  repoName: "mindspore",
5059
5251
  repoUrl: "https://github.com/mindspore-ai/mindspore"
5060
5252
  },
5253
+ "mamba-ssm": {
5254
+ prettyLabel: "MambaSSM",
5255
+ repoName: "MambaSSM",
5256
+ repoUrl: "https://github.com/state-spaces/mamba",
5257
+ filter: false,
5258
+ snippets: mamba_ssm
5259
+ },
5061
5260
  "mars5-tts": {
5062
5261
  prettyLabel: "MARS5-TTS",
5063
5262
  repoName: "MARS5-TTS",
@@ -6409,6 +6608,24 @@ var DATASET_LIBRARIES_UI_ELEMENTS = {
6409
6608
  repoName: "dask",
6410
6609
  repoUrl: "https://github.com/dask/dask",
6411
6610
  docsUrl: "https://huggingface.co/docs/hub/datasets-dask"
6611
+ },
6612
+ distilabel: {
6613
+ prettyLabel: "Distilabel",
6614
+ repoName: "distilabel",
6615
+ repoUrl: "https://github.com/argilla-io/distilabel",
6616
+ docsUrl: "https://distilabel.argilla.io"
6617
+ },
6618
+ fiftyone: {
6619
+ prettyLabel: "FiftyOne",
6620
+ repoName: "fiftyone",
6621
+ repoUrl: "https://github.com/voxel51/fiftyone",
6622
+ docsUrl: "https://docs.voxel51.com"
6623
+ },
6624
+ argilla: {
6625
+ prettyLabel: "Argilla",
6626
+ repoName: "argilla",
6627
+ repoUrl: "https://github.com/argilla-io/argilla",
6628
+ docsUrl: "https://argilla-io.github.io/argilla"
6412
6629
  }
6413
6630
  };
6414
6631
  // Annotate the CommonJS export names for ESM import in node:
package/dist/index.js CHANGED
@@ -3996,6 +3996,135 @@ var taskData36 = {
3996
3996
  };
3997
3997
  var data_default36 = taskData36;
3998
3998
 
3999
+ // src/tasks/image-to-3d/data.ts
4000
+ var taskData37 = {
4001
+ datasets: [
4002
+ {
4003
+ description: "A large dataset of over 10 million 3D objects.",
4004
+ id: "allenai/objaverse-xl"
4005
+ },
4006
+ {
4007
+ description: "A dataset of isolated object images for evaluating image-to-3D models.",
4008
+ id: "dylanebert/iso3d"
4009
+ }
4010
+ ],
4011
+ demo: {
4012
+ inputs: [
4013
+ {
4014
+ filename: "image-to-3d-image-input.png",
4015
+ type: "img"
4016
+ }
4017
+ ],
4018
+ outputs: [
4019
+ {
4020
+ label: "Result",
4021
+ content: "image-to-3d-3d-output-filename.glb",
4022
+ type: "text"
4023
+ }
4024
+ ]
4025
+ },
4026
+ metrics: [],
4027
+ models: [
4028
+ {
4029
+ description: "Fast image-to-3D mesh model by Tencent.",
4030
+ id: "TencentARC/InstantMesh"
4031
+ },
4032
+ {
4033
+ description: "Fast image-to-3D mesh model by StabilityAI.",
4034
+ id: "stabilityai/TripoSR"
4035
+ },
4036
+ {
4037
+ description: "A scaled up image-to-3D mesh model derived from TripoSR.",
4038
+ id: "hwjiang/Real3D"
4039
+ },
4040
+ {
4041
+ description: "Generative 3D gaussian splatting model.",
4042
+ id: "ashawkey/LGM"
4043
+ }
4044
+ ],
4045
+ spaces: [
4046
+ {
4047
+ description: "Leaderboard to evaluate image-to-3D models.",
4048
+ id: "dylanebert/3d-arena"
4049
+ },
4050
+ {
4051
+ description: "Image-to-3D demo with mesh outputs.",
4052
+ id: "TencentARC/InstantMesh"
4053
+ },
4054
+ {
4055
+ description: "Image-to-3D demo with mesh outputs.",
4056
+ id: "stabilityai/TripoSR"
4057
+ },
4058
+ {
4059
+ description: "Image-to-3D demo with mesh outputs.",
4060
+ id: "hwjiang/Real3D"
4061
+ },
4062
+ {
4063
+ description: "Image-to-3D demo with splat outputs.",
4064
+ id: "dylanebert/LGM-mini"
4065
+ }
4066
+ ],
4067
+ summary: "Image-to-3D models take in image input and produce 3D output.",
4068
+ widgetModels: [],
4069
+ youtubeId: ""
4070
+ };
4071
+ var data_default37 = taskData37;
4072
+
4073
+ // src/tasks/text-to-3d/data.ts
4074
+ var taskData38 = {
4075
+ datasets: [
4076
+ {
4077
+ description: "A large dataset of over 10 million 3D objects.",
4078
+ id: "allenai/objaverse-xl"
4079
+ },
4080
+ {
4081
+ description: "Descriptive captions for 3D objects in Objaverse.",
4082
+ id: "tiange/Cap3D"
4083
+ }
4084
+ ],
4085
+ demo: {
4086
+ inputs: [
4087
+ {
4088
+ label: "Prompt",
4089
+ content: "a cat statue",
4090
+ type: "text"
4091
+ }
4092
+ ],
4093
+ outputs: [
4094
+ {
4095
+ label: "Result",
4096
+ content: "text-to-3d-3d-output-filename.glb",
4097
+ type: "text"
4098
+ }
4099
+ ]
4100
+ },
4101
+ metrics: [],
4102
+ models: [
4103
+ {
4104
+ description: "Text-to-3D mesh model by OpenAI",
4105
+ id: "openai/shap-e"
4106
+ },
4107
+ {
4108
+ description: "Generative 3D gaussian splatting model.",
4109
+ id: "ashawkey/LGM"
4110
+ }
4111
+ ],
4112
+ spaces: [
4113
+ {
4114
+ description: "Text-to-3D demo with mesh outputs.",
4115
+ id: "hysts/Shap-E"
4116
+ },
4117
+ {
4118
+ description: "Text/image-to-3D demo with splat outputs.",
4119
+ id: "ashawkey/LGM"
4120
+ }
4121
+ ],
4122
+ summary: "Text-to-3D models take in text input and produce 3D output.",
4123
+ widgetModels: [],
4124
+ youtubeId: ""
4125
+ };
4126
+ var data_default38 = taskData38;
4127
+
3999
4128
  // src/tasks/index.ts
4000
4129
  var TASKS_MODEL_LIBRARIES = {
4001
4130
  "audio-classification": ["speechbrain", "transformers", "transformers.js"],
@@ -4053,8 +4182,8 @@ var TASKS_MODEL_LIBRARIES = {
4053
4182
  "zero-shot-classification": ["transformers", "transformers.js"],
4054
4183
  "zero-shot-image-classification": ["transformers", "transformers.js"],
4055
4184
  "zero-shot-object-detection": ["transformers", "transformers.js"],
4056
- "text-to-3d": [],
4057
- "image-to-3d": []
4185
+ "text-to-3d": ["diffusers"],
4186
+ "image-to-3d": ["diffusers"]
4058
4187
  };
4059
4188
  function getData(type, partialTaskData = data_default16) {
4060
4189
  return {
@@ -4112,8 +4241,8 @@ var TASKS_DATA = {
4112
4241
  "zero-shot-classification": getData("zero-shot-classification", data_default34),
4113
4242
  "zero-shot-image-classification": getData("zero-shot-image-classification", data_default35),
4114
4243
  "zero-shot-object-detection": getData("zero-shot-object-detection", data_default36),
4115
- "text-to-3d": getData("text-to-3d", data_default16),
4116
- "image-to-3d": getData("image-to-3d", data_default16)
4244
+ "text-to-3d": getData("text-to-3d", data_default38),
4245
+ "image-to-3d": getData("image-to-3d", data_default37)
4117
4246
  };
4118
4247
 
4119
4248
  // src/model-libraries-snippets.ts
@@ -4184,6 +4313,49 @@ var bm25s = (model) => [
4184
4313
 
4185
4314
  retriever = BM25HF.load_from_hub("${model.id}")`
4186
4315
  ];
4316
+ var depth_anything_v2 = (model) => {
4317
+ let encoder;
4318
+ let features;
4319
+ let out_channels;
4320
+ encoder = "<ENCODER>";
4321
+ features = "<NUMBER_OF_FEATURES>";
4322
+ out_channels = "<OUT_CHANNELS>";
4323
+ if (model.id === "depth-anything/Depth-Anything-V2-Small") {
4324
+ encoder = "vits";
4325
+ features = "64";
4326
+ out_channels = "[48, 96, 192, 384]";
4327
+ } else if (model.id === "depth-anything/Depth-Anything-V2-Base") {
4328
+ encoder = "vitb";
4329
+ features = "128";
4330
+ out_channels = "[96, 192, 384, 768]";
4331
+ } else if (model.id === "depth-anything/Depth-Anything-V2-Large") {
4332
+ encoder = "vitl";
4333
+ features = "256";
4334
+ out_channels = "[256, 512, 1024, 1024]";
4335
+ }
4336
+ return [
4337
+ `
4338
+ # Install from https://github.com/DepthAnything/Depth-Anything-V2
4339
+
4340
+ # Load the model and infer depth from an image
4341
+ import cv2
4342
+ import torch
4343
+
4344
+ from depth_anything_v2.dpt import DepthAnythingV2
+ from huggingface_hub import hf_hub_download
4345
+
4346
+ # instantiate the model
4347
+ model = DepthAnythingV2(encoder="${encoder}", features=${features}, out_channels=${out_channels})
4348
+
4349
+ # load the weights
4350
+ filepath = hf_hub_download(repo_id="${model.id}", filename="depth_anything_v2_${encoder}.pth", repo_type="model")
4351
+ state_dict = torch.load(filepath, map_location="cpu")
4352
+ model.load_state_dict(state_dict)
+ model.eval()
4353
+
4354
+ raw_img = cv2.imread("your/image/path")
4355
+ depth = model.infer_image(raw_img) # HxW raw depth map in numpy
4356
+ `
4357
+ ];
4358
+ };
4187
4359
  var diffusers_default = (model) => [
4188
4360
  `from diffusers import DiffusionPipeline
4189
4361
 
@@ -4309,6 +4481,11 @@ from huggingface_hub import from_pretrained_keras
4309
4481
  model = from_pretrained_keras("${model.id}")
4310
4482
  `
4311
4483
  ];
4484
+ var mamba_ssm = (model) => [
4485
+ `from mamba_ssm import MambaLMHeadModel
4486
+
4487
+ model = MambaLMHeadModel.from_pretrained("${model.id}")`
4488
+ ];
4312
4489
  var mars5_tts = (model) => [
4313
4490
  `# Install from https://github.com/Camb-ai/MARS5-TTS
4314
4491
 
@@ -4873,6 +5050,14 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
4873
5050
  filter: false,
4874
5051
  countDownloads: `path:"adapter_config.json"`
4875
5052
  },
5053
+ "depth-anything-v2": {
5054
+ prettyLabel: "DepthAnythingV2",
5055
+ repoName: "Depth Anything V2",
5056
+ repoUrl: "https://github.com/DepthAnything/Depth-Anything-V2",
5057
+ snippets: depth_anything_v2,
5058
+ filter: false,
5059
+ countDownloads: `path_extension:"pth"`
5060
+ },
4876
5061
  diffusers: {
4877
5062
  prettyLabel: "Diffusers",
4878
5063
  repoName: "\u{1F917}/diffusers",
@@ -5015,11 +5200,25 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
5015
5200
  repoName: "k2",
5016
5201
  repoUrl: "https://github.com/k2-fsa/k2"
5017
5202
  },
5203
+ liveportrait: {
5204
+ prettyLabel: "LivePortrait",
5205
+ repoName: "LivePortrait",
5206
+ repoUrl: "https://github.com/KwaiVGI/LivePortrait",
5207
+ filter: false,
5208
+ countDownloads: `path:"liveportrait/landmark.onnx"`
5209
+ },
5018
5210
  mindspore: {
5019
5211
  prettyLabel: "MindSpore",
5020
5212
  repoName: "mindspore",
5021
5213
  repoUrl: "https://github.com/mindspore-ai/mindspore"
5022
5214
  },
5215
+ "mamba-ssm": {
5216
+ prettyLabel: "MambaSSM",
5217
+ repoName: "MambaSSM",
5218
+ repoUrl: "https://github.com/state-spaces/mamba",
5219
+ filter: false,
5220
+ snippets: mamba_ssm
5221
+ },
5023
5222
  "mars5-tts": {
5024
5223
  prettyLabel: "MARS5-TTS",
5025
5224
  repoName: "MARS5-TTS",
@@ -6371,6 +6570,24 @@ var DATASET_LIBRARIES_UI_ELEMENTS = {
6371
6570
  repoName: "dask",
6372
6571
  repoUrl: "https://github.com/dask/dask",
6373
6572
  docsUrl: "https://huggingface.co/docs/hub/datasets-dask"
6573
+ },
6574
+ distilabel: {
6575
+ prettyLabel: "Distilabel",
6576
+ repoName: "distilabel",
6577
+ repoUrl: "https://github.com/argilla-io/distilabel",
6578
+ docsUrl: "https://distilabel.argilla.io"
6579
+ },
6580
+ fiftyone: {
6581
+ prettyLabel: "FiftyOne",
6582
+ repoName: "fiftyone",
6583
+ repoUrl: "https://github.com/voxel51/fiftyone",
6584
+ docsUrl: "https://docs.voxel51.com"
6585
+ },
6586
+ argilla: {
6587
+ prettyLabel: "Argilla",
6588
+ repoName: "argilla",
6589
+ repoUrl: "https://github.com/argilla-io/argilla",
6590
+ docsUrl: "https://argilla-io.github.io/argilla"
6374
6591
  }
6375
6592
  };
6376
6593
  export {
@@ -52,6 +52,24 @@ export declare const DATASET_LIBRARIES_UI_ELEMENTS: {
52
52
  repoUrl: string;
53
53
  docsUrl: string;
54
54
  };
55
+ distilabel: {
56
+ prettyLabel: string;
57
+ repoName: string;
58
+ repoUrl: string;
59
+ docsUrl: string;
60
+ };
61
+ fiftyone: {
62
+ prettyLabel: string;
63
+ repoName: string;
64
+ repoUrl: string;
65
+ docsUrl: string;
66
+ };
67
+ argilla: {
68
+ prettyLabel: string;
69
+ repoName: string;
70
+ repoUrl: string;
71
+ docsUrl: string;
72
+ };
55
73
  };
56
74
  export type DatasetLibraryKey = keyof typeof DATASET_LIBRARIES_UI_ELEMENTS;
57
75
  //# sourceMappingURL=dataset-libraries.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"dataset-libraries.d.ts","sourceRoot":"","sources":["../../src/dataset-libraries.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACvC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,eAAO,MAAM,6BAA6B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA+BS,CAAC;AAGpD,MAAM,MAAM,iBAAiB,GAAG,MAAM,OAAO,6BAA6B,CAAC"}
1
+ {"version":3,"file":"dataset-libraries.d.ts","sourceRoot":"","sources":["../../src/dataset-libraries.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACvC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,eAAO,MAAM,6BAA6B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAiDS,CAAC;AAGpD,MAAM,MAAM,iBAAiB,GAAG,MAAM,OAAO,6BAA6B,CAAC"}
@@ -5,6 +5,7 @@ export declare const asteroid: (model: ModelData) => string[];
5
5
  export declare const audioseal: (model: ModelData) => string[];
6
6
  export declare const bertopic: (model: ModelData) => string[];
7
7
  export declare const bm25s: (model: ModelData) => string[];
8
+ export declare const depth_anything_v2: (model: ModelData) => string[];
8
9
  export declare const diffusers: (model: ModelData) => string[];
9
10
  export declare const edsnlp: (model: ModelData) => string[];
10
11
  export declare const espnetTTS: (model: ModelData) => string[];
@@ -16,6 +17,7 @@ export declare const gliner: (model: ModelData) => string[];
16
17
  export declare const keras: (model: ModelData) => string[];
17
18
  export declare const keras_nlp: (model: ModelData) => string[];
18
19
  export declare const tf_keras: (model: ModelData) => string[];
20
+ export declare const mamba_ssm: (model: ModelData) => string[];
19
21
  export declare const mars5_tts: (model: ModelData) => string[];
20
22
  export declare const mesh_anything: () => string[];
21
23
  export declare const open_clip: (model: ModelData) => string[];
@@ -1 +1 @@
1
- {"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAY9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAMF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AA+BF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAS9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AAEF,eAAO,MAAM,oBAAoB,UAAW,SAAS,KAAG,MAAM,EAI7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YA
AY,UAAW,SAAS,KAAG,MAAM,EA4CrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC"}
1
+ {"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAY9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAMF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,iBAAiB,UAAW,SAAS,KAAG,MAAM,EA6C1D,CAAC;AA+BF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAS9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AAEF,eAAO,MAAM,oBAAoB,UAAW,SAAS,KAAG,MAAM,EAI7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,M
AAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EA4CrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC"}
@@ -139,6 +139,14 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
139
139
  filter: false;
140
140
  countDownloads: string;
141
141
  };
142
+ "depth-anything-v2": {
143
+ prettyLabel: string;
144
+ repoName: string;
145
+ repoUrl: string;
146
+ snippets: (model: ModelData) => string[];
147
+ filter: false;
148
+ countDownloads: string;
149
+ };
142
150
  diffusers: {
143
151
  prettyLabel: string;
144
152
  repoName: string;
@@ -279,11 +287,25 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
279
287
  repoName: string;
280
288
  repoUrl: string;
281
289
  };
290
+ liveportrait: {
291
+ prettyLabel: string;
292
+ repoName: string;
293
+ repoUrl: string;
294
+ filter: false;
295
+ countDownloads: string;
296
+ };
282
297
  mindspore: {
283
298
  prettyLabel: string;
284
299
  repoName: string;
285
300
  repoUrl: string;
286
301
  };
302
+ "mamba-ssm": {
303
+ prettyLabel: string;
304
+ repoName: string;
305
+ repoUrl: string;
306
+ filter: false;
307
+ snippets: (model: ModelData) => string[];
308
+ };
287
309
  "mars5-tts": {
288
310
  prettyLabel: string;
289
311
  repoName: string;
@@ -549,6 +571,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
549
571
  };
550
572
  };
551
573
  export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
552
- export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "bm25s" | "champ" | "chat_tts" | "colpali" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "mindspore" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
553
- export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "bm25s" | "champ" | "chat_tts" | "colpali" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "mindspore" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
574
+ export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
575
+ export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
554
576
  //# sourceMappingURL=model-libraries.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgfI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,43BAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,43BAQ1B,CAAC"}
1
+ {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAsgBI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,i7BAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,i7BAQ1B,CAAC"}
@@ -0,0 +1,4 @@
1
+ import type { TaskDataCustom } from "..";
2
+ declare const taskData: TaskDataCustom;
3
+ export default taskData;
4
+ //# sourceMappingURL=data.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/image-to-3d/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cAsEf,CAAC;AAEF,eAAe,QAAQ,CAAC"}
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/tasks/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AAwCjD,mBAAmB,kCAAkC,CAAC;AACtD,mBAAmB,0CAA0C,CAAC;AAC9D,YAAY,EACX,mBAAmB,EACnB,0BAA0B,EAC1B,oBAAoB,EACpB,4BAA4B,EAC5B,2BAA2B,EAC3B,0BAA0B,EAC1B,gCAAgC,EAChC,+BAA+B,GAC/B,MAAM,6BAA6B,CAAC;AACrC,mBAAmB,yCAAyC,CAAC;AAC7D,mBAAmB,gCAAgC,CAAC;AACpD,mBAAmB,uBAAuB,CAAC;AAC3C,YAAY,EACX,wBAAwB,EACxB,yBAAyB,EACzB,gCAAgC,EAChC,6BAA6B,GAC7B,MAAM,kCAAkC,CAAC;AAC1C,mBAAmB,4BAA4B,CAAC;AAChD,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,2BAA2B,CAAC;AAC5G,mBAAmB,gCAAgC,CAAC;AACpD,mBAAmB,8BAA8B,CAAC;AAClD,mBAAmB,8BAA8B,CAAC;AAClD,mBAAmB,gCAAgC,CAAC;AACpD,mBAAmB,iCAAiC,CAAC;AACrD,mBAAmB,2BAA2B,CAAC;AAC/C,mBAAmB,sCAAsC,CAAC;AAC1D,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,2BAA2B,CAAC;AAC5G,YAAY,EAAE,qBAAqB,EAAE,iBAAiB,EAAE,kBAAkB,EAAE,MAAM,4BAA4B,CAAC;AAC/G,mBAAmB,kCAAkC,CAAC;AACtD,YAAY,EACX,6BAA6B,EAC7B,qCAAqC,EACrC,gBAAgB,EAChB,iBAAiB,GACjB,MAAM,yBAAyB,CAAC;AACjC,YAAY,EACX,6BAA6B,EAC7B,uBAAuB,EACvB,wBAAwB,EACxB,+BAA+B,EAC/B,4BAA4B,GAC5B,MAAM,iCAAiC,CAAC;AACzC,YAAY,EACX,gCAAgC,EAChC,gCAAgC,EAChC,mBAAmB,EACnB,oBAAoB,EACpB,2BAA2B,EAC3B,qCAAqC,EACrC,kCAAkC,EAClC,yBAAyB,EACzB,uCAAuC,EACvC,0BAA0B,GAC1B,MAAM,6BAA6B,CAAC;AACrC,mBAAmB,kCAAkC,CAAC;AACtD,mBAAmB,uCAAuC,CAAC;AAC3D,mBAAmB,sCAAsC,CAAC;AAC1D,mBAAmB,4CAA4C,CAAC;AAChE,YAAY,EACX,WAAW,EACX,4BAA4B,EAC5B,gCAAgC,EAChC,6BAA6B,EAC7B,oCAAoC,GACpC,MAAM,wCAAwC,CAAC;AAEhD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AAE1D;;GAEG;AACH,eAAO,MAAM,qBAAqB,EAAE,MAAM,CAAC,YAAY,EAAE,eAAe,EAAE,CA0DzE,CAAC;AAoBF,eAAO,MAAM,UAAU,EAAE,MAAM,CAAC,YAAY,EAAE,QAAQ,GAAG,SAAS,CAkDxD,CAAC;AAEX,MAAM,WAAW,WAAW;IAC3B,WAAW,EAAE,MAAM,CAAC;IACpB,EAAE,EAAE,MAAM,CAAC;CACX;AAED,MAAM,MAAM,aAAa,GACtB;IACA,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,OAAO,CAAC;CACb,GACD;IACA,IAAI,EAAE,KAAK,CAAC;QACX,KAAK,EAAE,MAAM,CAAC;QACd,KAAK,EAAE,MAAM,CAAC;KACd,CAAC,CAAC;IACH,IAAI,EAAE,OAAO,CAAC;CACb,GACD;IACA,QAAQ,EAAE,MAAM,CAAC;IA
CjB,IAAI,EAAE,KAAK,CAAC;CACX,GACD;IACA,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC;IAClB,IAAI,EAAE,SAAS,CAAC;CACf,GACD;IACA,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;CACZ,GACD;IACA,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,KAAK,CAAC;QACb,GAAG,EAAE,MAAM,CAAC;QACZ,KAAK,EAAE,MAAM,CAAC;QACd,IAAI,EAAE,MAAM,CAAC;KACb,CAAC,CAAC;IACH,IAAI,EAAE,kBAAkB,CAAC;CACxB,CAAC;AAEL,MAAM,WAAW,QAAQ;IACxB,MAAM,EAAE,aAAa,EAAE,CAAC;IACxB,OAAO,EAAE,aAAa,EAAE,CAAC;CACzB;AAED,MAAM,WAAW,QAAQ;IACxB,QAAQ,EAAE,WAAW,EAAE,CAAC;IACxB,IAAI,EAAE,QAAQ,CAAC;IACf,EAAE,EAAE,YAAY,CAAC;IACjB,WAAW,CAAC,EAAE,YAAY,CAAC;IAC3B,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,EAAE,eAAe,EAAE,CAAC;IAC7B,OAAO,EAAE,WAAW,EAAE,CAAC;IACvB,MAAM,EAAE,WAAW,EAAE,CAAC;IACtB,MAAM,EAAE,WAAW,EAAE,CAAC;IACtB,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,EAAE,MAAM,EAAE,CAAC;IACvB,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,MAAM,cAAc,GAAG,IAAI,CAAC,QAAQ,EAAE,IAAI,GAAG,OAAO,GAAG,WAAW,CAAC,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/tasks/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AA0CjD,mBAAmB,kCAAkC,CAAC;AACtD,mBAAmB,0CAA0C,CAAC;AAC9D,YAAY,EACX,mBAAmB,EACnB,0BAA0B,EAC1B,oBAAoB,EACpB,4BAA4B,EAC5B,2BAA2B,EAC3B,0BAA0B,EAC1B,gCAAgC,EAChC,+BAA+B,GAC/B,MAAM,6BAA6B,CAAC;AACrC,mBAAmB,yCAAyC,CAAC;AAC7D,mBAAmB,gCAAgC,CAAC;AACpD,mBAAmB,uBAAuB,CAAC;AAC3C,YAAY,EACX,wBAAwB,EACxB,yBAAyB,EACzB,gCAAgC,EAChC,6BAA6B,GAC7B,MAAM,kCAAkC,CAAC;AAC1C,mBAAmB,4BAA4B,CAAC;AAChD,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,2BAA2B,CAAC;AAC5G,mBAAmB,gCAAgC,CAAC;AACpD,mBAAmB,8BAA8B,CAAC;AAClD,mBAAmB,8BAA8B,CAAC;AAClD,mBAAmB,gCAAgC,CAAC;AACpD,mBAAmB,iCAAiC,CAAC;AACrD,mBAAmB,2BAA2B,CAAC;AAC/C,mBAAmB,sCAAsC,CAAC;AAC1D,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,2BAA2B,CAAC;AAC5G,YAAY,EAAE,qBAAqB,EAAE,iBAAiB,EAAE,kBAAkB,EAAE,MAAM,4BAA4B,CAAC;AAC/G,mBAAmB,kCAAkC,CAAC;AACtD,YAAY,EACX,6BAA6B,EAC7B,qCAAqC,EACrC,gBAAgB,EAChB,iBAAiB,GACjB,MAAM,yBAAyB,CAAC;AACjC,YAAY,EACX,6BAA6B,EAC7B,uBAAuB,EACvB,wBAAwB,EACxB,+BAA+B,EAC/B,4BAA4B,GAC5B,MAAM,iCAAiC,CAAC;AACzC,YAAY,EACX,gCAAgC,EAChC,gCAAgC,EAChC,mBAAmB,EACnB,oBAAoB,EACpB,2BAA2B,EAC3B,qCAAqC,EACrC,kCAAkC,EAClC,yBAAyB,EACzB,uCAAuC,EACvC,0BAA0B,GAC1B,MAAM,6BAA6B,CAAC;AACrC,mBAAmB,kCAAkC,CAAC;AACtD,mBAAmB,uCAAuC,CAAC;AAC3D,mBAAmB,sCAAsC,CAAC;AAC1D,mBAAmB,4CAA4C,CAAC;AAChE,YAAY,EACX,WAAW,EACX,4BAA4B,EAC5B,gCAAgC,EAChC,6BAA6B,EAC7B,oCAAoC,GACpC,MAAM,wCAAwC,CAAC;AAEhD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AAE1D;;GAEG;AACH,eAAO,MAAM,qBAAqB,EAAE,MAAM,CAAC,YAAY,EAAE,eAAe,EAAE,CA0DzE,CAAC;AAoBF,eAAO,MAAM,UAAU,EAAE,MAAM,CAAC,YAAY,EAAE,QAAQ,GAAG,SAAS,CAkDxD,CAAC;AAEX,MAAM,WAAW,WAAW;IAC3B,WAAW,EAAE,MAAM,CAAC;IACpB,EAAE,EAAE,MAAM,CAAC;CACX;AAED,MAAM,MAAM,aAAa,GACtB;IACA,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,OAAO,CAAC;CACb,GACD;IACA,IAAI,EAAE,KAAK,CAAC;QACX,KAAK,EAAE,MAAM,CAAC;QACd,KAAK,EAAE,MAAM,CAAC;KACd,CAAC,CAAC;IACH,IAAI,EAAE,OAAO,CAAC;CACb,GACD;IACA,QAAQ,EAAE,MAAM,CAAC;IA
CjB,IAAI,EAAE,KAAK,CAAC;CACX,GACD;IACA,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC;IAClB,IAAI,EAAE,SAAS,CAAC;CACf,GACD;IACA,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;CACZ,GACD;IACA,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,KAAK,CAAC;QACb,GAAG,EAAE,MAAM,CAAC;QACZ,KAAK,EAAE,MAAM,CAAC;QACd,IAAI,EAAE,MAAM,CAAC;KACb,CAAC,CAAC;IACH,IAAI,EAAE,kBAAkB,CAAC;CACxB,CAAC;AAEL,MAAM,WAAW,QAAQ;IACxB,MAAM,EAAE,aAAa,EAAE,CAAC;IACxB,OAAO,EAAE,aAAa,EAAE,CAAC;CACzB;AAED,MAAM,WAAW,QAAQ;IACxB,QAAQ,EAAE,WAAW,EAAE,CAAC;IACxB,IAAI,EAAE,QAAQ,CAAC;IACf,EAAE,EAAE,YAAY,CAAC;IACjB,WAAW,CAAC,EAAE,YAAY,CAAC;IAC3B,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,EAAE,eAAe,EAAE,CAAC;IAC7B,OAAO,EAAE,WAAW,EAAE,CAAC;IACvB,MAAM,EAAE,WAAW,EAAE,CAAC;IACtB,MAAM,EAAE,WAAW,EAAE,CAAC;IACtB,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,EAAE,MAAM,EAAE,CAAC;IACvB,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,MAAM,cAAc,GAAG,IAAI,CAAC,QAAQ,EAAE,IAAI,GAAG,OAAO,GAAG,WAAW,CAAC,CAAC"}
@@ -0,0 +1,4 @@
1
+ import type { TaskDataCustom } from "..";
2
+ declare const taskData: TaskDataCustom;
3
+ export default taskData;
4
+ //# sourceMappingURL=data.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/text-to-3d/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cAmDf,CAAC;AAEF,eAAe,QAAQ,CAAC"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@huggingface/tasks",
3
3
  "packageManager": "pnpm@8.10.5",
4
- "version": "0.11.1",
4
+ "version": "0.11.3",
5
5
  "description": "List of ML tasks for huggingface.co/tasks",
6
6
  "repository": "https://github.com/huggingface/huggingface.js.git",
7
7
  "publishConfig": {
@@ -53,6 +53,24 @@ export const DATASET_LIBRARIES_UI_ELEMENTS = {
53
53
  repoUrl: "https://github.com/dask/dask",
54
54
  docsUrl: "https://huggingface.co/docs/hub/datasets-dask",
55
55
  },
56
+ distilabel: {
57
+ prettyLabel: "Distilabel",
58
+ repoName: "distilabel",
59
+ repoUrl: "https://github.com/argilla-io/distilabel",
60
+ docsUrl: "https://distilabel.argilla.io",
61
+ },
62
+ fiftyone: {
63
+ prettyLabel: "FiftyOne",
64
+ repoName: "fiftyone",
65
+ repoUrl: "https://github.com/voxel51/fiftyone",
66
+ docsUrl: "https://docs.voxel51.com",
67
+ },
68
+ argilla: {
69
+ prettyLabel: "Argilla",
70
+ repoName: "argilla",
71
+ repoUrl: "https://github.com/argilla-io/argilla",
72
+ docsUrl: "https://argilla-io.github.io/argilla",
73
+ },
56
74
  } satisfies Record<string, DatasetLibraryUiElement>;
57
75
 
58
76
  /// List of the dataset libraries supported by the Hub
@@ -82,6 +82,53 @@ export const bm25s = (model: ModelData): string[] => [
82
82
  retriever = BM25HF.load_from_hub("${model.id}")`,
83
83
  ];
84
84
 
85
+ export const depth_anything_v2 = (model: ModelData): string[] => {
86
+ let encoder: string;
87
+ let features: string;
88
+ let out_channels: string;
89
+
90
+ encoder = "<ENCODER>";
91
+ features = "<NUMBER_OF_FEATURES>";
92
+ out_channels = "<OUT_CHANNELS>";
93
+
94
+ if (model.id === "depth-anything/Depth-Anything-V2-Small") {
95
+ encoder = "vits";
96
+ features = "64";
97
+ out_channels = "[48, 96, 192, 384]";
98
+ } else if (model.id === "depth-anything/Depth-Anything-V2-Base") {
99
+ encoder = "vitb";
100
+ features = "128";
101
+ out_channels = "[96, 192, 384, 768]";
102
+ } else if (model.id === "depth-anything/Depth-Anything-V2-Large") {
103
+ encoder = "vitl";
104
+ features = "256";
105
+ out_channels = "[256, 512, 1024, 1024";
106
+ }
107
+
108
+ return [
109
+ `
110
+ # Install from https://github.com/DepthAnything/Depth-Anything-V2
111
+
112
+ # Load the model and infer depth from an image
113
+ import cv2
114
+ import torch
115
+
116
+ from depth_anything_v2.dpt import DepthAnythingV2
117
+
118
+ # instantiate the model
119
+ model = DepthAnythingV2(encoder="${encoder}", features=${features}, out_channels=${out_channels})
120
+
121
+ # load the weights
122
+ filepath = hf_hub_download(repo_id="${model.id}", filename="depth_anything_v2_${encoder}.pth", repo_type="model")
123
+ state_dict = torch.load(filepath, map_location="cpu")
124
+ model.load_state_dict(state_dict).eval()
125
+
126
+ raw_img = cv2.imread("your/image/path")
127
+ depth = model.infer_image(raw_img) # HxW raw depth map in numpy
128
+ `,
129
+ ];
130
+ };
131
+
85
132
  const diffusers_default = (model: ModelData) => [
86
133
  `from diffusers import DiffusionPipeline
87
134
 
@@ -223,6 +270,12 @@ model = from_pretrained_keras("${model.id}")
223
270
  `,
224
271
  ];
225
272
 
273
+ export const mamba_ssm = (model: ModelData): string[] => [
274
+ `from mamba_ssm import MambaLMHeadModel
275
+
276
+ model = MambaLMHeadModel.from_pretrained("${model.id}")`,
277
+ ];
278
+
226
279
  export const mars5_tts = (model: ModelData): string[] => [
227
280
  `# Install from https://github.com/Camb-ai/MARS5-TTS
228
281
 
@@ -143,6 +143,14 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
143
143
  filter: false,
144
144
  countDownloads: `path:"adapter_config.json"`,
145
145
  },
146
+ "depth-anything-v2": {
147
+ prettyLabel: "DepthAnythingV2",
148
+ repoName: "Depth Anything V2",
149
+ repoUrl: "https://github.com/DepthAnything/Depth-Anything-V2",
150
+ snippets: snippets.depth_anything_v2,
151
+ filter: false,
152
+ countDownloads: `path_extension:"pth"`,
153
+ },
146
154
  diffusers: {
147
155
  prettyLabel: "Diffusers",
148
156
  repoName: "🤗/diffusers",
@@ -285,11 +293,25 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
285
293
  repoName: "k2",
286
294
  repoUrl: "https://github.com/k2-fsa/k2",
287
295
  },
296
+ liveportrait: {
297
+ prettyLabel: "LivePortrait",
298
+ repoName: "LivePortrait",
299
+ repoUrl: "https://github.com/KwaiVGI/LivePortrait",
300
+ filter: false,
301
+ countDownloads: `path:"liveportrait/landmark.onnx"`,
302
+ },
288
303
  mindspore: {
289
304
  prettyLabel: "MindSpore",
290
305
  repoName: "mindspore",
291
306
  repoUrl: "https://github.com/mindspore-ai/mindspore",
292
307
  },
308
+ "mamba-ssm": {
309
+ prettyLabel: "MambaSSM",
310
+ repoName: "MambaSSM",
311
+ repoUrl: "https://github.com/state-spaces/mamba",
312
+ filter: false,
313
+ snippets: snippets.mamba_ssm,
314
+ },
293
315
  "mars5-tts": {
294
316
  prettyLabel: "MARS5-TTS",
295
317
  repoName: "MARS5-TTS",
@@ -0,0 +1,62 @@
1
+ ## Use Cases
2
+
3
+ Image-to-3D models can be used in a wide variety of applications that require 3D, such as games, animation, design, architecture, engineering, marketing, and more.
4
+
5
+ ![Image-to-3D Thumbnail](https://huggingface.co/datasets/huggingfacejs/tasks/resolve/main/image-to-3d/image-to-3d-thumbnail.png)
6
+
7
+ ### Generating Meshes
8
+
9
+ Meshes are the standard representation of 3D in industry.
10
+
11
+ ### Generating Gaussian Splats
12
+
13
+ [Gaussian Splatting](https://huggingface.co/blog/gaussian-splatting) is a rendering technique that represents scenes as fuzzy points.
14
+
15
+ ### Inference
16
+
17
+ Inference for this task typically leverages the [Diffusers](https://huggingface.co/docs/diffusers/index) library, using [Custom Pipelines](https://huggingface.co/docs/diffusers/v0.6.0/en/using-diffusers/custom_pipelines).
18
+
19
+ These pipelines are not standardized and vary from model to model; more details can be found in each model repository.
20
+
21
+ ```python
22
+ import torch
23
+ import requests
24
+ import numpy as np
25
+ from io import BytesIO
26
+ from diffusers import DiffusionPipeline
27
+ from PIL import Image
28
+
29
+ pipeline = DiffusionPipeline.from_pretrained(
30
+ "dylanebert/LGM-full",
31
+ custom_pipeline="dylanebert/LGM-full",
32
+ torch_dtype=torch.float16,
33
+ trust_remote_code=True,
34
+ ).to("cuda")
35
+
36
+ input_url = "https://huggingface.co/datasets/dylanebert/iso3d/resolve/main/jpg@512/a_cat_statue.jpg"
37
+ input_image = Image.open(BytesIO(requests.get(input_url).content))
38
+ input_image = np.array(input_image, dtype=np.float32) / 255.0
39
+ result = pipeline("", input_image)
40
+ result_path = "/tmp/output.ply"
41
+ pipeline.save_ply(result, result_path)
42
+ ```
43
+
44
+ In the code above, we:
45
+
46
+ 1. Import the necessary libraries
47
+ 2. Load the `LGM-full` model and custom pipeline
48
+ 3. Load and preprocess the input image
49
+ 4. Run the pipeline on the input image
50
+ 5. Save the output to a file
51
+
52
+ ### Output Formats
53
+
54
+ Meshes can be in `.obj`, `.glb`, `.stl`, or `.gltf` format. Other formats are allowed, but won't be rendered in the gradio [Model3D](https://www.gradio.app/docs/gradio/model3d) component.
55
+
56
+ Splats can be in `.ply` or `.splat` format. They can be rendered in the gradio [Model3D](https://www.gradio.app/docs/gradio/model3d) component using the [gsplat.js](https://github.com/huggingface/gsplat.js) library.
57
+
58
+ ## Useful Resources
59
+
60
+ - [ML for 3D Course](https://huggingface.co/learn/ml-for-3d-course)
61
+ - [3D Arena Leaderboard](https://huggingface.co/spaces/dylanebert/3d-arena)
62
+ - [gsplat.js](https://github.com/huggingface/gsplat.js)
@@ -0,0 +1,75 @@
1
+ import type { TaskDataCustom } from "..";
2
+
3
+ const taskData: TaskDataCustom = {
4
+ datasets: [
5
+ {
6
+ description: "A large dataset of over 10 million 3D objects.",
7
+ id: "allenai/objaverse-xl",
8
+ },
9
+ {
10
+ description: "A dataset of isolated object images for evaluating image-to-3D models.",
11
+ id: "dylanebert/iso3d",
12
+ },
13
+ ],
14
+ demo: {
15
+ inputs: [
16
+ {
17
+ filename: "image-to-3d-image-input.png",
18
+ type: "img",
19
+ },
20
+ ],
21
+ outputs: [
22
+ {
23
+ label: "Result",
24
+ content: "image-to-3d-3d-output-filename.glb",
25
+ type: "text",
26
+ },
27
+ ],
28
+ },
29
+ metrics: [],
30
+ models: [
31
+ {
32
+ description: "Fast image-to-3D mesh model by Tencent.",
33
+ id: "TencentARC/InstantMesh",
34
+ },
35
+ {
36
+ description: "Fast image-to-3D mesh model by StabilityAI",
37
+ id: "stabilityai/TripoSR",
38
+ },
39
+ {
40
+ description: "A scaled up image-to-3D mesh model derived from TripoSR.",
41
+ id: "hwjiang/Real3D",
42
+ },
43
+ {
44
+ description: "Generative 3D gaussian splatting model.",
45
+ id: "ashawkey/LGM",
46
+ },
47
+ ],
48
+ spaces: [
49
+ {
50
+ description: "Leaderboard to evaluate image-to-3D models.",
51
+ id: "dylanebert/3d-arena",
52
+ },
53
+ {
54
+ description: "Image-to-3D demo with mesh outputs.",
55
+ id: "TencentARC/InstantMesh",
56
+ },
57
+ {
58
+ description: "Image-to-3D demo with mesh outputs.",
59
+ id: "stabilityai/TripoSR",
60
+ },
61
+ {
62
+ description: "Image-to-3D demo with mesh outputs.",
63
+ id: "hwjiang/Real3D",
64
+ },
65
+ {
66
+ description: "Image-to-3D demo with splat outputs.",
67
+ id: "dylanebert/LGM-mini",
68
+ },
69
+ ],
70
+ summary: "Image-to-3D models take in image input and produce 3D output.",
71
+ widgetModels: [],
72
+ youtubeId: "",
73
+ };
74
+
75
+ export default taskData;
@@ -37,6 +37,8 @@ import visualQuestionAnswering from "./visual-question-answering/data";
37
37
  import zeroShotClassification from "./zero-shot-classification/data";
38
38
  import zeroShotImageClassification from "./zero-shot-image-classification/data";
39
39
  import zeroShotObjectDetection from "./zero-shot-object-detection/data";
40
+ import imageTo3D from "./image-to-3d/data";
41
+ import textTo3D from "./text-to-3d/data";
40
42
 
41
43
  export type * from "./audio-classification/inference";
42
44
  export type * from "./automatic-speech-recognition/inference";
@@ -169,8 +171,8 @@ export const TASKS_MODEL_LIBRARIES: Record<PipelineType, ModelLibraryKey[]> = {
169
171
  "zero-shot-classification": ["transformers", "transformers.js"],
170
172
  "zero-shot-image-classification": ["transformers", "transformers.js"],
171
173
  "zero-shot-object-detection": ["transformers", "transformers.js"],
172
- "text-to-3d": [],
173
- "image-to-3d": [],
174
+ "text-to-3d": ["diffusers"],
175
+ "image-to-3d": ["diffusers"],
174
176
  };
175
177
 
176
178
  /**
@@ -239,8 +241,8 @@ export const TASKS_DATA: Record<PipelineType, TaskData | undefined> = {
239
241
  "zero-shot-classification": getData("zero-shot-classification", zeroShotClassification),
240
242
  "zero-shot-image-classification": getData("zero-shot-image-classification", zeroShotImageClassification),
241
243
  "zero-shot-object-detection": getData("zero-shot-object-detection", zeroShotObjectDetection),
242
- "text-to-3d": getData("text-to-3d", placeholder),
243
- "image-to-3d": getData("image-to-3d", placeholder),
244
+ "text-to-3d": getData("text-to-3d", textTo3D),
245
+ "image-to-3d": getData("image-to-3d", imageTo3D),
244
246
  } as const;
245
247
 
246
248
  export interface ExampleRepo {
@@ -0,0 +1,62 @@
1
+ ## Use Cases
2
+
3
+ Text-to-3D models can be used in a wide variety of applications that require 3D, such as games, animation, design, architecture, engineering, marketing, and more.
4
+
5
+ ![Text-to-3D Thumbnail](https://huggingface.co/datasets/huggingfacejs/tasks/resolve/main/text-to-3d/text-to-3d-thumbnail.png)
6
+
7
+ This task is similar to the [image-to-3d](https://huggingface.co/tasks/image-to-3d) task, but takes text input instead of image input. In practice, this is often equivalent to a combination of [text-to-image](https://huggingface.co/tasks/text-to-image) and [image-to-3d](https://huggingface.co/tasks/image-to-3d). That is, the text is first converted to an image, then the image is converted to 3D.
8
+
9
+ ### Generating Meshes
10
+
11
+ Meshes are the standard representation of 3D in industry.
12
+
13
+ ### Generating Gaussian Splats
14
+
15
+ [Gaussian Splatting](https://huggingface.co/blog/gaussian-splatting) is a rendering technique that represents scenes as fuzzy points.
16
+
17
+ ### Inference
18
+
19
+ Inference for this task typically leverages the [Diffusers](https://huggingface.co/docs/diffusers/index) library for inference, using [Custom Pipelines](https://huggingface.co/docs/diffusers/v0.6.0/en/using-diffusers/custom_pipelines).
20
+
21
+ These are unstandardized and depend on the model. More details can be found in each model repository.
22
+
23
+ ```python
24
+ import torch
25
+ import requests
26
+ import numpy as np
27
+ from io import BytesIO
28
+ from diffusers import DiffusionPipeline
29
+ from PIL import Image
30
+
31
+ pipeline = DiffusionPipeline.from_pretrained(
32
+ "dylanebert/LGM-full",
33
+ custom_pipeline="dylanebert/LGM-full",
34
+ torch_dtype=torch.float16,
35
+ trust_remote_code=True,
36
+ ).to("cuda")
37
+
38
+ input_prompt = "a cat statue"
39
+ result = pipeline(input_prompt, None)
40
+ result_path = "/tmp/output.ply"
41
+ pipeline.save_ply(result, result_path)
42
+ ```
43
+
44
+ In the code above, we:
45
+
46
+ 1. Import the necessary libraries
47
+ 2. Load the `LGM-full` model and custom pipeline
48
+ 3. Define the input prompt
49
+ 4. Run the pipeline on the input prompt
50
+ 5. Save the output to a file
51
+
52
+ ### Output Formats
53
+
54
+ Meshes can be in `.obj`, `.glb`, `.stl`, or `.gltf` format. Other formats are allowed, but won't be rendered in the gradio [Model3D](https://www.gradio.app/docs/gradio/model3d) component.
55
+
56
+ Splats can be in `.ply` or `.splat` format. They can be rendered in the gradio [Model3D](https://www.gradio.app/docs/gradio/model3d) component using the [gsplat.js](https://github.com/huggingface/gsplat.js) library.
57
+
58
+ ## Useful Resources
59
+
60
+ - [ML for 3D Course](https://huggingface.co/learn/ml-for-3d-course)
61
+ - [3D Arena Leaderboard](https://huggingface.co/spaces/dylanebert/3d-arena)
62
+ - [gsplat.js](https://github.com/huggingface/gsplat.js)
@@ -0,0 +1,56 @@
1
+ import type { TaskDataCustom } from "..";
2
+
3
+ const taskData: TaskDataCustom = {
4
+ datasets: [
5
+ {
6
+ description: "A large dataset of over 10 million 3D objects.",
7
+ id: "allenai/objaverse-xl",
8
+ },
9
+ {
10
+ description: "Descriptive captions for 3D objects in Objaverse.",
11
+ id: "tiange/Cap3D",
12
+ },
13
+ ],
14
+ demo: {
15
+ inputs: [
16
+ {
17
+ label: "Prompt",
18
+ content: "a cat statue",
19
+ type: "text",
20
+ },
21
+ ],
22
+ outputs: [
23
+ {
24
+ label: "Result",
25
+ content: "text-to-3d-3d-output-filename.glb",
26
+ type: "text",
27
+ },
28
+ ],
29
+ },
30
+ metrics: [],
31
+ models: [
32
+ {
33
+ description: "Text-to-3D mesh model by OpenAI",
34
+ id: "openai/shap-e",
35
+ },
36
+ {
37
+ description: "Generative 3D gaussian splatting model.",
38
+ id: "ashawkey/LGM",
39
+ },
40
+ ],
41
+ spaces: [
42
+ {
43
+ description: "Text-to-3D demo with mesh outputs.",
44
+ id: "hysts/Shap-E",
45
+ },
46
+ {
47
+ description: "Text/image-to-3D demo with splat outputs.",
48
+ id: "ashawkey/LGM",
49
+ },
50
+ ],
51
+ summary: "Text-to-3D models take in text input and produce 3D output.",
52
+ widgetModels: [],
53
+ youtubeId: "",
54
+ };
55
+
56
+ export default taskData;