@huggingface/tasks 0.10.21 → 0.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +101 -17
- package/dist/index.js +101 -16
- package/dist/scripts/inference-tei-import.d.ts +2 -0
- package/dist/scripts/inference-tei-import.d.ts.map +1 -0
- package/dist/src/index.d.ts +0 -1
- package/dist/src/index.d.ts.map +1 -1
- package/dist/src/model-data.d.ts +2 -15
- package/dist/src/model-data.d.ts.map +1 -1
- package/dist/src/model-libraries-snippets.d.ts +4 -0
- package/dist/src/model-libraries-snippets.d.ts.map +1 -1
- package/dist/src/model-libraries.d.ts +57 -2
- package/dist/src/model-libraries.d.ts.map +1 -1
- package/dist/src/tasks/feature-extraction/inference.d.ts +22 -7
- package/dist/src/tasks/feature-extraction/inference.d.ts.map +1 -1
- package/package.json +3 -2
- package/src/index.ts +0 -1
- package/src/model-data.ts +2 -16
- package/src/model-libraries-snippets.ts +47 -2
- package/src/model-libraries.ts +56 -0
- package/src/tasks/feature-extraction/inference.ts +23 -5
- package/src/tasks/feature-extraction/spec/input.json +34 -13
- package/src/tasks/feature-extraction/spec/output.json +10 -2
- package/src/tasks/image-text-to-text/data.ts +1 -1
package/dist/index.cjs
CHANGED
@@ -24,7 +24,6 @@ __export(src_exports, {
 ALL_MODEL_LIBRARY_KEYS: () => ALL_MODEL_LIBRARY_KEYS,
 DATASET_LIBRARIES_UI_ELEMENTS: () => DATASET_LIBRARIES_UI_ELEMENTS,
 DEFAULT_MEMORY_OPTIONS: () => DEFAULT_MEMORY_OPTIONS,
-InferenceDisplayability: () => InferenceDisplayability,
 LIBRARY_TASK_MAPPING: () => LIBRARY_TASK_MAPPING,
 LOCAL_APPS: () => LOCAL_APPS,
 MAPPING_DEFAULT_WIDGET: () => MAPPING_DEFAULT_WIDGET,
@@ -2141,7 +2140,7 @@ var taskData11 = {
 demo: {
 inputs: [
 {
-filename: "
+filename: "image-text-to-text-input.png",
 type: "img"
 },
 {
@@ -4296,13 +4295,18 @@ var gliner = (model) => [
 model = GLiNER.from_pretrained("${model.id}")`
 ];
 var keras = (model) => [
-
+`# Available backend options are: "jax", "tensorflow", "torch".
+import os
+os.environ["KERAS_BACKEND"] = "tensorflow"
+
+import keras

-model =
+model = keras.saving.load_model("hf://${model.id}")
 `
 ];
 var keras_nlp = (model) => [
 `# Available backend options are: "jax", "tensorflow", "torch".
+import os
 os.environ["KERAS_BACKEND"] = "tensorflow"

 import keras_nlp
@@ -4311,6 +4315,29 @@ tokenizer = keras_nlp.models.Tokenizer.from_preset("hf://${model.id}")
 backbone = keras_nlp.models.Backbone.from_preset("hf://${model.id}")
 `
 ];
+var tf_keras = (model) => [
+`# Note: 'keras<3.x' or 'tf_keras' must be installed (legacy)
+# See https://github.com/keras-team/tf-keras for more details.
+from huggingface_hub import from_pretrained_keras
+
+model = from_pretrained_keras("${model.id}")
+`
+];
+var mars5_tts = (model) => [
+`# Install from https://github.com/Camb-ai/MARS5-TTS
+
+from inference import Mars5TTS
+mars5 = Mars5TTS.from_pretrained("${model.id}")`
+];
+var mesh_anything = () => [
+`# Install from https://github.com/buaacyw/MeshAnything.git
+
+from MeshAnything.models.meshanything import MeshAnything
+
+# refer to https://github.com/buaacyw/MeshAnything/blob/main/main.py#L91 on how to define args
+# and https://github.com/buaacyw/MeshAnything/blob/main/app.py regarding usage
+model = MeshAnything(args)`
+];
 var open_clip = (model) => [
 `import open_clip

@@ -4759,6 +4786,19 @@ var audiocraft = (model) => {
 return [`# Type of model unknown.`];
 }
 };
+var whisperkit = () => [
+`# Install CLI with Homebrew on macOS device
+brew install whisperkit-cli
+
+# View all available inference options
+whisperkit-cli transcribe --help
+
+# Download and run inference using whisper base model
+whisperkit-cli transcribe --audio-path /path/to/audio.mp3
+
+# Or use your preferred model variant
+whisperkit-cli transcribe --model "large-v3" --model-prefix "distil" --audio-path /path/to/audio.mp3 --verbose`
+];

 // src/model-libraries.ts
 var MODEL_LIBRARIES_UI_ELEMENTS = {
@@ -4793,7 +4833,8 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 repoName: "audiocraft",
 repoUrl: "https://github.com/facebookresearch/audiocraft",
 snippets: audiocraft,
-filter: false
+filter: false,
+countDownloads: `path:"state_dict.bin"`
 },
 audioseal: {
 prettyLabel: "AudioSeal",
@@ -4825,6 +4866,12 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 filter: false,
 countDownloads: `path:"params.index.json"`
 },
+champ: {
+prettyLabel: "Champ",
+repoName: "Champ",
+repoUrl: "https://github.com/fudan-generative-vision/champ",
+countDownloads: `path:"champ/motion_module.pth"`
+},
 chat_tts: {
 prettyLabel: "ChatTTS",
 repoName: "ChatTTS",
@@ -4833,6 +4880,13 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 filter: false,
 countDownloads: `path:"asset/GPT.pt"`
 },
+colpali: {
+prettyLabel: "ColPali",
+repoName: "ColPali",
+repoUrl: "https://github.com/ManuelFay/colpali",
+filter: false,
+countDownloads: `path:"adapter_config.json"`
+},
 diffusers: {
 prettyLabel: "Diffusers",
 repoName: "\u{1F917}/diffusers",
@@ -4932,6 +4986,12 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 filter: false,
 countDownloads: `path:"ckpt/tensor00000_000" OR path:"ckpt-0/tensor00000_000"`
 },
+hallo: {
+prettyLabel: "Hallo",
+repoName: "Hallo",
+repoUrl: "https://github.com/fudan-generative-vision/hallo",
+countDownloads: `path:"hallo/net.pth"`
+},
 "hunyuan-dit": {
 prettyLabel: "HunyuanDiT",
 repoName: "HunyuanDiT",
@@ -4945,6 +5005,16 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 docsUrl: "https://huggingface.co/docs/hub/keras",
 snippets: keras,
 filter: true,
+countDownloads: `path:"config.json" OR path_extension:"keras"`
+},
+"tf-keras": {
+// Legacy "Keras 2" library (tensorflow-only)
+prettyLabel: "TF-Keras",
+repoName: "TF-Keras",
+repoUrl: "https://github.com/keras-team/tf-keras",
+docsUrl: "https://huggingface.co/docs/hub/tf-keras",
+snippets: tf_keras,
+filter: true,
 countDownloads: `path:"saved_model.pb"`
 },
 "keras-nlp": {
@@ -4964,6 +5034,22 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 repoName: "mindspore",
 repoUrl: "https://github.com/mindspore-ai/mindspore"
 },
+"mars5-tts": {
+prettyLabel: "MARS5-TTS",
+repoName: "MARS5-TTS",
+repoUrl: "https://github.com/Camb-ai/MARS5-TTS",
+filter: false,
+countDownloads: `path:"mars5_ar.safetensors"`,
+snippets: mars5_tts
+},
+"mesh-anything": {
+prettyLabel: "MeshAnything",
+repoName: "MeshAnything",
+repoUrl: "https://github.com/buaacyw/MeshAnything",
+filter: false,
+countDownloads: `path:"MeshAnything_350m.pth"`,
+snippets: mesh_anything
+},
 "ml-agents": {
 prettyLabel: "ml-agents",
 repoName: "ml-agents",
@@ -4989,6 +5075,14 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 filter: false,
 countDownloads: `path:"model.safetensors"`
 },
+"mlc-llm": {
+prettyLabel: "MLC-LLM",
+repoName: "MLC-LLM",
+repoUrl: "https://github.com/mlc-ai/mlc-llm",
+docsUrl: "https://llm.mlc.ai/docs/",
+filter: false,
+countDownloads: `path:"mlc-chat-config.json"`
+},
 nemo: {
 prettyLabel: "NeMo",
 repoName: "NeMo",
@@ -5199,23 +5293,14 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 prettyLabel: "WhisperKit",
 repoName: "WhisperKit",
 repoUrl: "https://github.com/argmaxinc/WhisperKit",
+docsUrl: "https://github.com/argmaxinc/WhisperKit?tab=readme-ov-file#homebrew",
+snippets: whisperkit,
 countDownloads: `path_filename:"model" AND path_extension:"mil" AND _exists_:"path_prefix"`
 }
 };
 var ALL_MODEL_LIBRARY_KEYS = Object.keys(MODEL_LIBRARIES_UI_ELEMENTS);
 var ALL_DISPLAY_MODEL_LIBRARY_KEYS = Object.entries(MODEL_LIBRARIES_UI_ELEMENTS).filter(([_, v]) => v.filter).map(([k]) => k);

-// src/model-data.ts
-var InferenceDisplayability = /* @__PURE__ */ ((InferenceDisplayability2) => {
-InferenceDisplayability2["Yes"] = "Yes";
-InferenceDisplayability2["ExplicitOptOut"] = "ExplicitOptOut";
-InferenceDisplayability2["CustomCode"] = "CustomCode";
-InferenceDisplayability2["LibraryNotDetected"] = "LibraryNotDetected";
-InferenceDisplayability2["PipelineNotDetected"] = "PipelineNotDetected";
-InferenceDisplayability2["PipelineLibraryPairNotSupported"] = "PipelineLibraryPairNotSupported";
-return InferenceDisplayability2;
-})(InferenceDisplayability || {});
-
 // src/tokenizer-data.ts
 var SPECIAL_TOKENS_ATTRIBUTES = [
 "bos_token",
@@ -6308,7 +6393,6 @@ var DATASET_LIBRARIES_UI_ELEMENTS = {
 ALL_MODEL_LIBRARY_KEYS,
 DATASET_LIBRARIES_UI_ELEMENTS,
 DEFAULT_MEMORY_OPTIONS,
-InferenceDisplayability,
 LIBRARY_TASK_MAPPING,
 LOCAL_APPS,
 MAPPING_DEFAULT_WIDGET,
package/dist/index.js
CHANGED
@@ -2102,7 +2102,7 @@ var taskData11 = {
 demo: {
 inputs: [
 {
-filename: "
+filename: "image-text-to-text-input.png",
 type: "img"
 },
 {
@@ -4257,13 +4257,18 @@ var gliner = (model) => [
 model = GLiNER.from_pretrained("${model.id}")`
 ];
 var keras = (model) => [
-
+`# Available backend options are: "jax", "tensorflow", "torch".
+import os
+os.environ["KERAS_BACKEND"] = "tensorflow"
+
+import keras

-model =
+model = keras.saving.load_model("hf://${model.id}")
 `
 ];
 var keras_nlp = (model) => [
 `# Available backend options are: "jax", "tensorflow", "torch".
+import os
 os.environ["KERAS_BACKEND"] = "tensorflow"

 import keras_nlp
@@ -4272,6 +4277,29 @@ tokenizer = keras_nlp.models.Tokenizer.from_preset("hf://${model.id}")
 backbone = keras_nlp.models.Backbone.from_preset("hf://${model.id}")
 `
 ];
+var tf_keras = (model) => [
+`# Note: 'keras<3.x' or 'tf_keras' must be installed (legacy)
+# See https://github.com/keras-team/tf-keras for more details.
+from huggingface_hub import from_pretrained_keras
+
+model = from_pretrained_keras("${model.id}")
+`
+];
+var mars5_tts = (model) => [
+`# Install from https://github.com/Camb-ai/MARS5-TTS
+
+from inference import Mars5TTS
+mars5 = Mars5TTS.from_pretrained("${model.id}")`
+];
+var mesh_anything = () => [
+`# Install from https://github.com/buaacyw/MeshAnything.git
+
+from MeshAnything.models.meshanything import MeshAnything
+
+# refer to https://github.com/buaacyw/MeshAnything/blob/main/main.py#L91 on how to define args
+# and https://github.com/buaacyw/MeshAnything/blob/main/app.py regarding usage
+model = MeshAnything(args)`
+];
 var open_clip = (model) => [
 `import open_clip

@@ -4720,6 +4748,19 @@ var audiocraft = (model) => {
 return [`# Type of model unknown.`];
 }
 };
+var whisperkit = () => [
+`# Install CLI with Homebrew on macOS device
+brew install whisperkit-cli
+
+# View all available inference options
+whisperkit-cli transcribe --help
+
+# Download and run inference using whisper base model
+whisperkit-cli transcribe --audio-path /path/to/audio.mp3
+
+# Or use your preferred model variant
+whisperkit-cli transcribe --model "large-v3" --model-prefix "distil" --audio-path /path/to/audio.mp3 --verbose`
+];

 // src/model-libraries.ts
 var MODEL_LIBRARIES_UI_ELEMENTS = {
@@ -4754,7 +4795,8 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 repoName: "audiocraft",
 repoUrl: "https://github.com/facebookresearch/audiocraft",
 snippets: audiocraft,
-filter: false
+filter: false,
+countDownloads: `path:"state_dict.bin"`
 },
 audioseal: {
 prettyLabel: "AudioSeal",
@@ -4786,6 +4828,12 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 filter: false,
 countDownloads: `path:"params.index.json"`
 },
+champ: {
+prettyLabel: "Champ",
+repoName: "Champ",
+repoUrl: "https://github.com/fudan-generative-vision/champ",
+countDownloads: `path:"champ/motion_module.pth"`
+},
 chat_tts: {
 prettyLabel: "ChatTTS",
 repoName: "ChatTTS",
@@ -4794,6 +4842,13 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 filter: false,
 countDownloads: `path:"asset/GPT.pt"`
 },
+colpali: {
+prettyLabel: "ColPali",
+repoName: "ColPali",
+repoUrl: "https://github.com/ManuelFay/colpali",
+filter: false,
+countDownloads: `path:"adapter_config.json"`
+},
 diffusers: {
 prettyLabel: "Diffusers",
 repoName: "\u{1F917}/diffusers",
@@ -4893,6 +4948,12 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 filter: false,
 countDownloads: `path:"ckpt/tensor00000_000" OR path:"ckpt-0/tensor00000_000"`
 },
+hallo: {
+prettyLabel: "Hallo",
+repoName: "Hallo",
+repoUrl: "https://github.com/fudan-generative-vision/hallo",
+countDownloads: `path:"hallo/net.pth"`
+},
 "hunyuan-dit": {
 prettyLabel: "HunyuanDiT",
 repoName: "HunyuanDiT",
@@ -4906,6 +4967,16 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 docsUrl: "https://huggingface.co/docs/hub/keras",
 snippets: keras,
 filter: true,
+countDownloads: `path:"config.json" OR path_extension:"keras"`
+},
+"tf-keras": {
+// Legacy "Keras 2" library (tensorflow-only)
+prettyLabel: "TF-Keras",
+repoName: "TF-Keras",
+repoUrl: "https://github.com/keras-team/tf-keras",
+docsUrl: "https://huggingface.co/docs/hub/tf-keras",
+snippets: tf_keras,
+filter: true,
 countDownloads: `path:"saved_model.pb"`
 },
 "keras-nlp": {
@@ -4925,6 +4996,22 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 repoName: "mindspore",
 repoUrl: "https://github.com/mindspore-ai/mindspore"
 },
+"mars5-tts": {
+prettyLabel: "MARS5-TTS",
+repoName: "MARS5-TTS",
+repoUrl: "https://github.com/Camb-ai/MARS5-TTS",
+filter: false,
+countDownloads: `path:"mars5_ar.safetensors"`,
+snippets: mars5_tts
+},
+"mesh-anything": {
+prettyLabel: "MeshAnything",
+repoName: "MeshAnything",
+repoUrl: "https://github.com/buaacyw/MeshAnything",
+filter: false,
+countDownloads: `path:"MeshAnything_350m.pth"`,
+snippets: mesh_anything
+},
 "ml-agents": {
 prettyLabel: "ml-agents",
 repoName: "ml-agents",
@@ -4950,6 +5037,14 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 filter: false,
 countDownloads: `path:"model.safetensors"`
 },
+"mlc-llm": {
+prettyLabel: "MLC-LLM",
+repoName: "MLC-LLM",
+repoUrl: "https://github.com/mlc-ai/mlc-llm",
+docsUrl: "https://llm.mlc.ai/docs/",
+filter: false,
+countDownloads: `path:"mlc-chat-config.json"`
+},
 nemo: {
 prettyLabel: "NeMo",
 repoName: "NeMo",
@@ -5160,23 +5255,14 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
 prettyLabel: "WhisperKit",
 repoName: "WhisperKit",
 repoUrl: "https://github.com/argmaxinc/WhisperKit",
+docsUrl: "https://github.com/argmaxinc/WhisperKit?tab=readme-ov-file#homebrew",
+snippets: whisperkit,
 countDownloads: `path_filename:"model" AND path_extension:"mil" AND _exists_:"path_prefix"`
 }
 };
 var ALL_MODEL_LIBRARY_KEYS = Object.keys(MODEL_LIBRARIES_UI_ELEMENTS);
 var ALL_DISPLAY_MODEL_LIBRARY_KEYS = Object.entries(MODEL_LIBRARIES_UI_ELEMENTS).filter(([_, v]) => v.filter).map(([k]) => k);

-// src/model-data.ts
-var InferenceDisplayability = /* @__PURE__ */ ((InferenceDisplayability2) => {
-InferenceDisplayability2["Yes"] = "Yes";
-InferenceDisplayability2["ExplicitOptOut"] = "ExplicitOptOut";
-InferenceDisplayability2["CustomCode"] = "CustomCode";
-InferenceDisplayability2["LibraryNotDetected"] = "LibraryNotDetected";
-InferenceDisplayability2["PipelineNotDetected"] = "PipelineNotDetected";
-InferenceDisplayability2["PipelineLibraryPairNotSupported"] = "PipelineLibraryPairNotSupported";
-return InferenceDisplayability2;
-})(InferenceDisplayability || {});
-
 // src/tokenizer-data.ts
 var SPECIAL_TOKENS_ATTRIBUTES = [
 "bos_token",
@@ -6268,7 +6354,6 @@ export {
 ALL_MODEL_LIBRARY_KEYS,
 DATASET_LIBRARIES_UI_ELEMENTS,
 DEFAULT_MEMORY_OPTIONS,
-InferenceDisplayability,
 LIBRARY_TASK_MAPPING,
 LOCAL_APPS,
 MAPPING_DEFAULT_WIDGET,
package/dist/scripts/inference-tei-import.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"inference-tei-import.d.ts","sourceRoot":"","sources":["../../scripts/inference-tei-import.ts"],"names":[],"mappings":""}
package/dist/src/index.d.ts
CHANGED
@@ -8,7 +8,6 @@ export type { LibraryUiElement, ModelLibraryKey } from "./model-libraries";
 export type { ModelData, TransformersInfo } from "./model-data";
 export type { AddedToken, SpecialTokensMap, TokenizerConfig } from "./tokenizer-data";
 export type { WidgetExample, WidgetExampleAttribute, WidgetExampleAssetAndPromptInput, WidgetExampleAssetAndTextInput, WidgetExampleAssetAndZeroShotInput, WidgetExampleAssetInput, WidgetExampleChatInput, WidgetExampleSentenceSimilarityInput, WidgetExampleStructuredDataInput, WidgetExampleTableDataInput, WidgetExampleTextAndContextInput, WidgetExampleTextAndTableInput, WidgetExampleTextInput, WidgetExampleZeroShotTextInput, WidgetExampleOutput, WidgetExampleOutputUrl, WidgetExampleOutputLabels, WidgetExampleOutputAnswerScore, WidgetExampleOutputText, } from "./widget-example";
-export { InferenceDisplayability } from "./model-data";
 export { SPECIAL_TOKENS_ATTRIBUTES } from "./tokenizer-data";
 import * as snippets from "./snippets";
 export { snippets };
package/dist/src/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAC1D,OAAO,EAAE,sBAAsB,EAAE,MAAM,yBAAyB,CAAC;AACjE,YAAY,EAAE,QAAQ,EAAE,QAAQ,EAAE,aAAa,EAAE,WAAW,EAAE,MAAM,SAAS,CAAC;AAC9E,cAAc,SAAS,CAAC;AACxB,OAAO,EACN,aAAa,EACb,cAAc,EACd,KAAK,UAAU,EACf,KAAK,YAAY,EACjB,KAAK,YAAY,EACjB,KAAK,QAAQ,EACb,UAAU,EACV,eAAe,EACf,aAAa,EACb,kBAAkB,GAClB,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,8BAA8B,EAAE,sBAAsB,EAAE,2BAA2B,EAAE,MAAM,mBAAmB,CAAC;AACxH,YAAY,EAAE,gBAAgB,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAC3E,YAAY,EAAE,SAAS,EAAE,gBAAgB,EAAE,MAAM,cAAc,CAAC;AAChE,YAAY,EAAE,UAAU,EAAE,gBAAgB,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AACtF,YAAY,EACX,aAAa,EACb,sBAAsB,EACtB,gCAAgC,EAChC,8BAA8B,EAC9B,kCAAkC,EAClC,uBAAuB,EACvB,sBAAsB,EACtB,oCAAoC,EACpC,gCAAgC,EAChC,2BAA2B,EAC3B,gCAAgC,EAChC,8BAA8B,EAC9B,sBAAsB,EACtB,8BAA8B,EAC9B,mBAAmB,EACnB,sBAAsB,EACtB,yBAAyB,EACzB,8BAA8B,EAC9B,uBAAuB,GACvB,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EAAE,
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAC1D,OAAO,EAAE,sBAAsB,EAAE,MAAM,yBAAyB,CAAC;AACjE,YAAY,EAAE,QAAQ,EAAE,QAAQ,EAAE,aAAa,EAAE,WAAW,EAAE,MAAM,SAAS,CAAC;AAC9E,cAAc,SAAS,CAAC;AACxB,OAAO,EACN,aAAa,EACb,cAAc,EACd,KAAK,UAAU,EACf,KAAK,YAAY,EACjB,KAAK,YAAY,EACjB,KAAK,QAAQ,EACb,UAAU,EACV,eAAe,EACf,aAAa,EACb,kBAAkB,GAClB,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,8BAA8B,EAAE,sBAAsB,EAAE,2BAA2B,EAAE,MAAM,mBAAmB,CAAC;AACxH,YAAY,EAAE,gBAAgB,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAC3E,YAAY,EAAE,SAAS,EAAE,gBAAgB,EAAE,MAAM,cAAc,CAAC;AAChE,YAAY,EAAE,UAAU,EAAE,gBAAgB,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AACtF,YAAY,EACX,aAAa,EACb,sBAAsB,EACtB,gCAAgC,EAChC,8BAA8B,EAC9B,kCAAkC,EAClC,uBAAuB,EACvB,sBAAsB,EACtB,oCAAoC,EACpC,gCAAgC,EAChC,2BAA2B,EAC3B,gCAAgC,EAChC,8BAA8B,EAC9B,sBAAsB,EACtB,8BAA8B,EAC9B,mBAAmB,EACnB,sBAAsB,EACtB,yBAAyB,EACzB,8BAA8B,EAC9B,uBAAuB,GACvB,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EAAE,yBAAyB,EAAE,MAAM,kBAAkB,CAAC;AAE7D,OAAO,KAAK,QAAQ,MAAM,YAAY,CAAC;AACvC,OAAO,EAAE,QAAQ,EAAE,CAAC;AAEpB,OAAO,EAAE,IAAI,EAAE,sBAAsB,EAAE,MAAM,YAAY,CAAC;AAC1D,YAAY,EAAE,YAAY,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AACxD,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,YAAY,EAAE,QAAQ,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AAE1D,OAAO,EAAE,6BAA6B,EAAE,MAAM,qBAAqB,CAAC;AACpE,YAAY,EAAE,uBAAuB,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC"}
package/dist/src/model-data.d.ts
CHANGED
@@ -1,20 +1,6 @@
 import type { PipelineType } from "./pipelines";
 import type { WidgetExample } from "./widget-example";
 import type { TokenizerConfig } from "./tokenizer-data";
-export declare enum InferenceDisplayability {
-/**
-* Yes
-*/
-Yes = "Yes",
-/**
-* And then, all the possible reasons why it's no:
-*/
-ExplicitOptOut = "ExplicitOptOut",
-CustomCode = "CustomCode",
-LibraryNotDetected = "LibraryNotDetected",
-PipelineNotDetected = "PipelineNotDetected",
-PipelineLibraryPairNotSupported = "PipelineLibraryPairNotSupported"
-}
 /**
 * Public interface for model metadata
 */
@@ -25,8 +11,9 @@ export interface ModelData {
 id: string;
 /**
 * Whether or not to enable inference widget for this model
+* TODO(type it)
 */
-inference:
+inference: string;
 /**
 * is this model private?
 */
package/dist/src/model-data.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"model-data.d.ts","sourceRoot":"","sources":["../../src/model-data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAChD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACtD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAExD
+
{"version":3,"file":"model-data.d.ts","sourceRoot":"","sources":["../../src/model-data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAChD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACtD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAExD;;GAEG;AACH,MAAM,WAAW,SAAS;IACzB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IACX;;;OAGG;IACH,SAAS,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB;;OAEG;IACH,MAAM,CAAC,EAAE;QACR,aAAa,CAAC,EAAE,MAAM,EAAE,CAAC;QACzB;;WAEG;QACH,QAAQ,CAAC,EAAE;YACV;;eAEG;YACH,CAAC,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;SACpB,CAAC;QACF,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,mBAAmB,CAAC,EAAE;YACrB,IAAI,CAAC,EAAE,MAAM,CAAC;YACd,YAAY,CAAC,EAAE,OAAO,CAAC;YACvB,YAAY,CAAC,EAAE,OAAO,CAAC;SACvB,CAAC;QACF,gBAAgB,CAAC,EAAE,eAAe,CAAC;QACnC,oBAAoB,CAAC,EAAE;YACtB,UAAU,CAAC,EAAE,MAAM,CAAC;YACpB,WAAW,CAAC,EAAE,MAAM,CAAC;SACrB,CAAC;QACF,SAAS,CAAC,EAAE;YACX,WAAW,CAAC,EAAE,MAAM,CAAC;SACrB,CAAC;QACF,OAAO,CAAC,EAAE;YACT,KAAK,CAAC,EAAE;gBACP,IAAI,CAAC,EAAE,MAAM,CAAC;aACd,CAAC;YACF,YAAY,CAAC,EAAE,MAAM,CAAC;SACtB,CAAC;QACF,WAAW,CAAC,EAAE;YACb,qBAAqB,CAAC,EAAE,MAAM,CAAC;YAC/B,iBAAiB,CAAC,EAAE,MAAM,CAAC;YAC3B,gBAAgB,CAAC,EAAE,MAAM,CAAC;SAC1B,CAAC;QACF,IAAI,CAAC,EAAE;YACN,uBAAuB,CAAC,EAAE,MAAM,CAAC;YACjC,SAAS,CAAC,EAAE,MAAM,CAAC;SACnB,CAAC;KACF,CAAC;IACF;;OAEG;IACH,IAAI,EAAE,MAAM,EAAE,CAAC;IACf;;OAEG;IACH,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;IACpC;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,GAAG,SAAS,CAAC;IACxC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAChC;;;;;OAKG;IACH,UAAU,CAAC,EAAE,aAAa,EAAE,GAAG,SAAS,CAAC;IACzC;;;;;;;;;OASG;IACH,QAAQ,CAAC,EAAE;QACV,SAAS,CAAC,EACP,OAAO,GACP;YACA,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;SACpC,CAAC;QACL,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;KAC/B,CAAC;IACF;;;OAGG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;CACtB;AAED;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB"}
package/dist/src/model-libraries-snippets.d.ts
CHANGED
@@ -15,6 +15,9 @@ export declare const flair: (model: ModelData) => string[];
 export declare const gliner: (model: ModelData) => string[];
 export declare const keras: (model: ModelData) => string[];
 export declare const keras_nlp: (model: ModelData) => string[];
+export declare const tf_keras: (model: ModelData) => string[];
+export declare const mars5_tts: (model: ModelData) => string[];
+export declare const mesh_anything: () => string[];
 export declare const open_clip: (model: ModelData) => string[];
 export declare const paddlenlp: (model: ModelData) => string[];
 export declare const pyannote_audio_pipeline: (model: ModelData) => string[];
@@ -45,4 +48,5 @@ export declare const mlxim: (model: ModelData) => string[];
 export declare const nemo: (model: ModelData) => string[];
 export declare const pythae: (model: ModelData) => string[];
 export declare const audiocraft: (model: ModelData) => string[];
+export declare const whisperkit: () => string[];
 //# sourceMappingURL=model-libraries-snippets.d.ts.map
package/dist/src/model-libraries-snippets.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAY9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAMF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AA+BF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,
+
{"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAY9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAMF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AA+BF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAS9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AAEF,eAAO,MAAM,oBAAoB,UAAW,SAAS,KAAG,MAAM,EAI7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EA4CrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC"}
package/dist/src/model-libraries.d.ts
CHANGED
@@ -86,6 +86,7 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 repoUrl: string;
 snippets: (model: ModelData) => string[];
 filter: false;
+countDownloads: string;
 };
 audioseal: {
 prettyLabel: string;
@@ -117,6 +118,12 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 filter: false;
 countDownloads: string;
 };
+champ: {
+prettyLabel: string;
+repoName: string;
+repoUrl: string;
+countDownloads: string;
+};
 chat_tts: {
 prettyLabel: string;
 repoName: string;
@@ -125,6 +132,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 filter: false;
 countDownloads: string;
 };
+colpali: {
+prettyLabel: string;
+repoName: string;
+repoUrl: string;
+filter: false;
+countDownloads: string;
+};
 diffusers: {
 prettyLabel: string;
 repoName: string;
@@ -223,6 +237,12 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 filter: false;
 countDownloads: string;
 };
+hallo: {
+prettyLabel: string;
+repoName: string;
+repoUrl: string;
+countDownloads: string;
+};
 "hunyuan-dit": {
 prettyLabel: string;
 repoName: string;
@@ -238,6 +258,15 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 filter: true;
 countDownloads: string;
 };
+"tf-keras": {
+prettyLabel: string;
+repoName: string;
+repoUrl: string;
+docsUrl: string;
+snippets: (model: ModelData) => string[];
+filter: true;
+countDownloads: string;
+};
 "keras-nlp": {
 prettyLabel: string;
 repoName: string;
@@ -255,6 +284,22 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 repoName: string;
 repoUrl: string;
 };
+"mars5-tts": {
+prettyLabel: string;
+repoName: string;
+repoUrl: string;
+filter: false;
+countDownloads: string;
+snippets: (model: ModelData) => string[];
+};
+"mesh-anything": {
+prettyLabel: string;
+repoName: string;
+repoUrl: string;
+filter: false;
+countDownloads: string;
+snippets: () => string[];
+};
 "ml-agents": {
 prettyLabel: string;
 repoName: string;
@@ -280,6 +325,14 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 filter: false;
 countDownloads: string;
 };
+"mlc-llm": {
+prettyLabel: string;
+repoName: string;
+repoUrl: string;
+docsUrl: string;
+filter: false;
+countDownloads: string;
+};
 nemo: {
 prettyLabel: string;
 repoName: string;
@@ -490,10 +543,12 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 prettyLabel: string;
 repoName: string;
 repoUrl: string;
+docsUrl: string;
+snippets: () => string[];
 countDownloads: string;
 };
 };
 export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
-
export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "bm25s" | "chat_tts" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hunyuan-dit" | "keras" | "keras-nlp" | "k2" | "mindspore" | "ml-agents" | "mlx" | "mlx-image" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
-
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "bm25s" | "chat_tts" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hunyuan-dit" | "keras" | "keras-nlp" | "k2" | "mindspore" | "ml-agents" | "mlx" | "mlx-image" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
+
export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "bm25s" | "champ" | "chat_tts" | "colpali" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "mindspore" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
+
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "bm25s" | "champ" | "chat_tts" | "colpali" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "mindspore" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
 //# sourceMappingURL=model-libraries.d.ts.map
package/dist/src/model-libraries.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B
+
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgfI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,43BAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,43BAQ1B,CAAC"}
package/dist/src/tasks/feature-extraction/inference.d.ts
CHANGED
@@ -3,21 +3,36 @@
 *
 * Using src/scripts/inference-codegen
 */
-export type FeatureExtractionOutput =
+export type FeatureExtractionOutput = Array<number[]>;
 /**
-*
+* Feature Extraction Input.
+*
+* Auto-generated from TEI specs.
+* For more details, check out
+* https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tei-import.ts.
 */
 export interface FeatureExtractionInput {
 /**
-* The text to
+* The text to embed.
 */
 inputs: string;
+normalize?: boolean;
 /**
-*
+* The name of the prompt that should be used by for encoding. If not set, no prompt
+* will be applied.
+*
+* Must be a key in the `Sentence Transformers` configuration `prompts` dictionary.
+*
+* For example if ``prompt_name`` is "query" and the ``prompts`` is {"query": "query: ",
+* ...},
+* then the sentence "What is the capital of France?" will be encoded as
+* "query: What is the capital of France?" because the prompt text will be prepended before
+* any text to encode.
 */
-
-
-
+prompt_name?: string;
+truncate?: boolean;
+truncation_direction?: FeatureExtractionInputTruncationDirection;
 [property: string]: unknown;
 }
+export type FeatureExtractionInputTruncationDirection = "Left" | "Right";
 //# sourceMappingURL=inference.d.ts.map
package/dist/src/tasks/feature-extraction/inference.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/feature-extraction/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,MAAM,MAAM,uBAAuB,GAAG,
+
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/feature-extraction/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,MAAM,MAAM,uBAAuB,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,CAAC;AAEtD;;;;;;GAMG;AACH,MAAM,WAAW,sBAAsB;IACtC;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;;;;;;;;;;OAWG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,oBAAoB,CAAC,EAAE,yCAAyC,CAAC;IACjE,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED,MAAM,MAAM,yCAAyC,GAAG,MAAM,GAAG,OAAO,CAAC"}
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
 "name": "@huggingface/tasks",
 "packageManager": "pnpm@8.10.5",
-"version": "0.
+"version": "0.11.0",
 "description": "List of ML tasks for huggingface.co/tasks",
 "repository": "https://github.com/huggingface/huggingface.js.git",
 "publishConfig": {
@@ -44,6 +44,7 @@
 "build": "tsup src/index.ts --format cjs,esm --clean && tsc --emitDeclarationOnly --declaration",
 "check": "tsc",
 "inference-codegen": "tsx scripts/inference-codegen.ts && prettier --write src/tasks/*/inference.ts",
-"inference-tgi-import": "tsx scripts/inference-tgi-import.ts && prettier --write src/tasks/text-generation/spec/*.json && prettier --write src/tasks/chat-completion/spec/*.json"
+"inference-tgi-import": "tsx scripts/inference-tgi-import.ts && prettier --write src/tasks/text-generation/spec/*.json && prettier --write src/tasks/chat-completion/spec/*.json",
+"inference-tei-import": "tsx scripts/inference-tei-import.ts && prettier --write src/tasks/feature-extraction/spec/*.json"
 }
 }
package/src/index.ts
CHANGED
@@ -39,7 +39,6 @@ export type {
 WidgetExampleOutputAnswerScore,
 WidgetExampleOutputText,
 } from "./widget-example";
-export { InferenceDisplayability } from "./model-data";
 export { SPECIAL_TOKENS_ATTRIBUTES } from "./tokenizer-data";

 import * as snippets from "./snippets";
package/src/model-data.ts
CHANGED
@@ -2,21 +2,6 @@ import type { PipelineType } from "./pipelines";
 import type { WidgetExample } from "./widget-example";
 import type { TokenizerConfig } from "./tokenizer-data";

-export enum InferenceDisplayability {
-/**
-* Yes
-*/
-Yes = "Yes",
-/**
-* And then, all the possible reasons why it's no:
-*/
-ExplicitOptOut = "ExplicitOptOut",
-CustomCode = "CustomCode",
-LibraryNotDetected = "LibraryNotDetected",
-PipelineNotDetected = "PipelineNotDetected",
-PipelineLibraryPairNotSupported = "PipelineLibraryPairNotSupported",
-}
-
 /**
 * Public interface for model metadata
 */
@@ -27,8 +12,9 @@ export interface ModelData {
 id: string;
 /**
 * Whether or not to enable inference widget for this model
+* TODO(type it)
 */
-inference:
+inference: string;
 /**
 * is this model private?
 */
package/src/model-libraries-snippets.ts
CHANGED
@@ -192,14 +192,19 @@ model = GLiNER.from_pretrained("${model.id}")`,
 ];

 export const keras = (model: ModelData): string[] => [
-
+`# Available backend options are: "jax", "tensorflow", "torch".
+import os
+os.environ["KERAS_BACKEND"] = "tensorflow"
+
+import keras

-model =
+model = keras.saving.load_model("hf://${model.id}")
 `,
 ];

 export const keras_nlp = (model: ModelData): string[] => [
 `# Available backend options are: "jax", "tensorflow", "torch".
+import os
 os.environ["KERAS_BACKEND"] = "tensorflow"

 import keras_nlp
@@ -209,6 +214,32 @@ backbone = keras_nlp.models.Backbone.from_preset("hf://${model.id}")
 `,
 ];

+export const tf_keras = (model: ModelData): string[] => [
+`# Note: 'keras<3.x' or 'tf_keras' must be installed (legacy)
+# See https://github.com/keras-team/tf-keras for more details.
+from huggingface_hub import from_pretrained_keras
+
+model = from_pretrained_keras("${model.id}")
+`,
+];
+
+export const mars5_tts = (model: ModelData): string[] => [
+`# Install from https://github.com/Camb-ai/MARS5-TTS
+
+from inference import Mars5TTS
+mars5 = Mars5TTS.from_pretrained("${model.id}")`,
+];
+
+export const mesh_anything = (): string[] => [
+`# Install from https://github.com/buaacyw/MeshAnything.git
+
+from MeshAnything.models.meshanything import MeshAnything
+
+# refer to https://github.com/buaacyw/MeshAnything/blob/main/main.py#L91 on how to define args
+# and https://github.com/buaacyw/MeshAnything/blob/main/app.py regarding usage
+model = MeshAnything(args)`,
+];
+
 export const open_clip = (model: ModelData): string[] => [
 `import open_clip

@@ -715,4 +746,18 @@ export const audiocraft = (model: ModelData): string[] => {
 return [`# Type of model unknown.`];
 }
 };
+
+export const whisperkit = (): string[] => [
+`# Install CLI with Homebrew on macOS device
+brew install whisperkit-cli
+
+# View all available inference options
+whisperkit-cli transcribe --help
+
+# Download and run inference using whisper base model
+whisperkit-cli transcribe --audio-path /path/to/audio.mp3
+
+# Or use your preferred model variant
+whisperkit-cli transcribe --model "large-v3" --model-prefix "distil" --audio-path /path/to/audio.mp3 --verbose`,
+];
 //#endregion
package/src/model-libraries.ts
CHANGED
@@ -90,6 +90,7 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 repoUrl: "https://github.com/facebookresearch/audiocraft",
 snippets: snippets.audiocraft,
 filter: false,
+countDownloads: `path:"state_dict.bin"`,
 },
 audioseal: {
 prettyLabel: "AudioSeal",
@@ -121,6 +122,12 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 filter: false,
 countDownloads: `path:"params.index.json"`,
 },
+champ: {
+prettyLabel: "Champ",
+repoName: "Champ",
+repoUrl: "https://github.com/fudan-generative-vision/champ",
+countDownloads: `path:"champ/motion_module.pth"`,
+},
 chat_tts: {
 prettyLabel: "ChatTTS",
 repoName: "ChatTTS",
@@ -129,6 +136,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 filter: false,
 countDownloads: `path:"asset/GPT.pt"`,
 },
+colpali: {
+prettyLabel: "ColPali",
+repoName: "ColPali",
+repoUrl: "https://github.com/ManuelFay/colpali",
+filter: false,
+countDownloads: `path:"adapter_config.json"`,
+},
 diffusers: {
 prettyLabel: "Diffusers",
 repoName: "🤗/diffusers",
@@ -228,6 +242,12 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 filter: false,
 countDownloads: `path:"ckpt/tensor00000_000" OR path:"ckpt-0/tensor00000_000"`,
 },
+hallo: {
+prettyLabel: "Hallo",
+repoName: "Hallo",
+repoUrl: "https://github.com/fudan-generative-vision/hallo",
+countDownloads: `path:"hallo/net.pth"`,
+},
 "hunyuan-dit": {
 prettyLabel: "HunyuanDiT",
 repoName: "HunyuanDiT",
@@ -241,6 +261,16 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 docsUrl: "https://huggingface.co/docs/hub/keras",
 snippets: snippets.keras,
 filter: true,
+countDownloads: `path:"config.json" OR path_extension:"keras"`,
+},
+"tf-keras": {
+// Legacy "Keras 2" library (tensorflow-only)
+prettyLabel: "TF-Keras",
+repoName: "TF-Keras",
+repoUrl: "https://github.com/keras-team/tf-keras",
+docsUrl: "https://huggingface.co/docs/hub/tf-keras",
+snippets: snippets.tf_keras,
+filter: true,
 countDownloads: `path:"saved_model.pb"`,
 },
 "keras-nlp": {
@@ -260,6 +290,22 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 repoName: "mindspore",
 repoUrl: "https://github.com/mindspore-ai/mindspore",
 },
+"mars5-tts": {
+prettyLabel: "MARS5-TTS",
+repoName: "MARS5-TTS",
+repoUrl: "https://github.com/Camb-ai/MARS5-TTS",
+filter: false,
+countDownloads: `path:"mars5_ar.safetensors"`,
+snippets: snippets.mars5_tts,
+},
+"mesh-anything": {
+prettyLabel: "MeshAnything",
+repoName: "MeshAnything",
+repoUrl: "https://github.com/buaacyw/MeshAnything",
+filter: false,
+countDownloads: `path:"MeshAnything_350m.pth"`,
+snippets: snippets.mesh_anything,
+},
 "ml-agents": {
 prettyLabel: "ml-agents",
 repoName: "ml-agents",
@@ -285,6 +331,14 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 filter: false,
 countDownloads: `path:"model.safetensors"`,
 },
+"mlc-llm": {
+prettyLabel: "MLC-LLM",
+repoName: "MLC-LLM",
+repoUrl: "https://github.com/mlc-ai/mlc-llm",
+docsUrl: "https://llm.mlc.ai/docs/",
+filter: false,
+countDownloads: `path:"mlc-chat-config.json"`,
+},
 nemo: {
 prettyLabel: "NeMo",
 repoName: "NeMo",
@@ -495,6 +549,8 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 prettyLabel: "WhisperKit",
 repoName: "WhisperKit",
 repoUrl: "https://github.com/argmaxinc/WhisperKit",
+docsUrl: "https://github.com/argmaxinc/WhisperKit?tab=readme-ov-file#homebrew",
+snippets: snippets.whisperkit,
 countDownloads: `path_filename:"model" AND path_extension:"mil" AND _exists_:"path_prefix"`,
 },
 } satisfies Record<string, LibraryUiElement>;
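Every new entry above follows the existing LibraryUiElement shape: an optional snippets generator plus a countDownloads query telling the Hub which file to count as a download. A rough consumer-side sketch, assuming MODEL_LIBRARIES_UI_ELEMENTS and ModelData stay re-exported from the package root as in earlier releases, and using a hypothetical, minimal model object:

import { MODEL_LIBRARIES_UI_ELEMENTS, type ModelData } from "@huggingface/tasks";

// Minimal stand-in; a real ModelData carries more fields (tags, downloads, ...).
const model = { id: "CAMB-AI/MARS5-TTS", tags: [] } as unknown as ModelData;

const lib = MODEL_LIBRARIES_UI_ELEMENTS["mars5-tts"];
console.log(lib.countDownloads);             // path:"mars5_ar.safetensors"
console.log(lib.snippets(model).join("\n")); // the MARS5-TTS install/usage snippet defined above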
package/src/tasks/feature-extraction/inference.ts
CHANGED
@@ -4,19 +4,37 @@
 * Using src/scripts/inference-codegen
 */

-export type FeatureExtractionOutput =
+export type FeatureExtractionOutput = Array<number[]>;

 /**
-*
+* Feature Extraction Input.
+*
+* Auto-generated from TEI specs.
+* For more details, check out
+* https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tei-import.ts.
 */
 export interface FeatureExtractionInput {
 /**
-* The text to
+* The text to embed.
 */
 inputs: string;
+normalize?: boolean;
 /**
-*
+* The name of the prompt that should be used by for encoding. If not set, no prompt
+* will be applied.
+*
+* Must be a key in the `Sentence Transformers` configuration `prompts` dictionary.
+*
+* For example if ``prompt_name`` is "query" and the ``prompts`` is {"query": "query: ",
+* ...},
+* then the sentence "What is the capital of France?" will be encoded as
+* "query: What is the capital of France?" because the prompt text will be prepended before
+* any text to encode.
 */
-
+prompt_name?: string;
+truncate?: boolean;
+truncation_direction?: FeatureExtractionInputTruncationDirection;
 [property: string]: unknown;
 }
+
+export type FeatureExtractionInputTruncationDirection = "Left" | "Right";
package/src/tasks/feature-extraction/spec/input.json
CHANGED
@@ -1,26 +1,47 @@
 {
 "$id": "/inference/schemas/feature-extraction/input.json",
 "$schema": "http://json-schema.org/draft-06/schema#",
-"description": "
+"description": "Feature Extraction Input.\n\nAuto-generated from TEI specs.\nFor more details, check out https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tei-import.ts.",
 "title": "FeatureExtractionInput",
 "type": "object",
+"required": ["inputs"],
 "properties": {
 "inputs": {
-"
-"
+"type": "string",
+"description": "The text to embed."
 },
-"
-"
-"
+"normalize": {
+"type": "boolean",
+"default": "true",
+"example": "true"
+},
+"prompt_name": {
+"type": "string",
+"description": "The name of the prompt that should be used by for encoding. If not set, no prompt\nwill be applied.\n\nMust be a key in the `Sentence Transformers` configuration `prompts` dictionary.\n\nFor example if ``prompt_name`` is \"query\" and the ``prompts`` is {\"query\": \"query: \", ...},\nthen the sentence \"What is the capital of France?\" will be encoded as\n\"query: What is the capital of France?\" because the prompt text will be prepended before\nany text to encode.",
+"default": "null",
+"example": "null",
+"nullable": true
+},
+"truncate": {
+"type": "boolean",
+"default": "false",
+"example": "false",
+"nullable": true
+},
+"truncation_direction": {
+"allOf": [
+{
+"$ref": "#/$defs/FeatureExtractionInputTruncationDirection"
+}
+],
+"default": "right"
 }
 },
 "$defs": {
-"
-"
-"
-"
-"properties": {}
+"FeatureExtractionInputTruncationDirection": {
+"type": "string",
+"enum": ["Left", "Right"],
+"title": "FeatureExtractionInputTruncationDirection"
 }
-}
-"required": ["inputs"]
+}
 }
package/src/tasks/feature-extraction/spec/output.json
CHANGED
@@ -1,7 +1,15 @@
 {
 "$id": "/inference/schemas/feature-extraction/output.json",
 "$schema": "http://json-schema.org/draft-06/schema#",
-"description": "
+"description": "Feature Extraction Output.\n\nAuto-generated from TEI specs.\nFor more details, check out https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-tei-import.ts.",
+"title": "FeatureExtractionOutput",
 "type": "array",
-"
+"$defs": {},
+"items": {
+"type": "array",
+"items": {
+"type": "number",
+"format": "float"
+}
+}
 }