@huggingface/tasks 0.12.27 → 0.12.28

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -795,7 +795,7 @@ var MAPPING_DEFAULT_WIDGET = /* @__PURE__ */ new Map([
  ]);

  // src/pipelines.ts
- var MODALITIES = ["cv", "nlp", "audio", "tabular", "multimodal", "rl", "other"];
+ var MODALITIES = ["multimodal", "nlp", "cv", "audio", "tabular", "rl", "other"];
  var MODALITY_LABELS = {
  multimodal: "Multimodal",
  nlp: "Natural Language Processing",
@@ -5797,6 +5797,12 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  repoUrl: "https://github.com/abetlen/llama-cpp-python",
  snippets: llama_cpp_python
  },
+ "mini-omni2": {
+ prettyLabel: "Mini-Omni2",
+ repoName: "Mini-Omni2",
+ repoUrl: "https://github.com/gpt-omni/mini-omni2",
+ countDownloads: `path:"model_config.yaml"`
+ },
  mindspore: {
  prettyLabel: "MindSpore",
  repoName: "mindspore",
@@ -6250,7 +6256,28 @@ var inputsQuestionAnswering = () => `{
  }`;
  var inputsTextClassification = () => `"I like you. I love you"`;
  var inputsTokenClassification = () => `"My name is Sarah Jessica Parker but you can call me Jessica"`;
- var inputsTextGeneration = () => `"Can you please let us know more details about your "`;
+ var inputsTextGeneration = (model) => {
+ if (model.tags.includes("conversational")) {
+ return model.pipeline_tag === "text-generation" ? [{ role: "user", content: "What is the capital of France?" }] : [
+ {
+ role: "user",
+ content: [
+ {
+ type: "text",
+ text: "Describe this image in one sentence."
+ },
+ {
+ type: "image_url",
+ image_url: {
+ url: "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"
+ }
+ }
+ ]
+ }
+ ];
+ }
+ return `"Can you please let us know more details about your "`;
+ };
  var inputsText2TextGeneration = () => `"The answer to the universe is"`;
  var inputsFillMask = (model) => `"The answer to the universe is ${model.mask_token}."`;
  var inputsSentenceSimilarity = () => `{
@@ -6307,13 +6334,15 @@ function getModelInputSnippet(model, noWrap = false, noQuotes = false) {
  const inputs = modelInputSnippets[model.pipeline_tag];
  if (inputs) {
  let result = inputs(model);
- if (noWrap) {
- result = result.replace(/(?:(?:\r?\n|\r)\t*)|\t+/g, " ");
- }
- if (noQuotes) {
- const REGEX_QUOTES = /^"(.+)"$/s;
- const match = result.match(REGEX_QUOTES);
- result = match ? match[1] : result;
+ if (typeof result === "string") {
+ if (noWrap) {
+ result = result.replace(/(?:(?:\r?\n|\r)\t*)|\t+/g, " ");
+ }
+ if (noQuotes) {
+ const REGEX_QUOTES = /^"(.+)"$/s;
+ const match = result.match(REGEX_QUOTES);
+ result = match ? match[1] : result;
+ }
  }
  return result;
  }
@@ -6376,20 +6405,7 @@ var snippetBasic = (model, accessToken) => ({
  var snippetTextGeneration = (model, accessToken, opts) => {
  if (model.tags.includes("conversational")) {
  const streaming = opts?.streaming ?? true;
- const exampleMessages = model.pipeline_tag === "text-generation" ? [{ role: "user", content: "What is the capital of France?" }] : [
- {
- role: "user",
- content: [
- {
- type: "image_url",
- image_url: {
- url: "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"
- }
- },
- { type: "text", text: "Describe this image in one sentence." }
- ]
- }
- ];
+ const exampleMessages = getModelInputSnippet(model);
  const messages = opts?.messages ?? exampleMessages;
  const config = {
  ...opts?.temperature ? { temperature: opts.temperature } : void 0,
@@ -6489,20 +6505,7 @@ __export(python_exports, {
  });
  var snippetConversational = (model, accessToken, opts) => {
  const streaming = opts?.streaming ?? true;
- const exampleMessages = model.pipeline_tag === "text-generation" ? [{ role: "user", content: "What is the capital of France?" }] : [
- {
- role: "user",
- content: [
- {
- type: "image_url",
- image_url: {
- url: "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"
- }
- },
- { type: "text", text: "Describe this image in one sentence." }
- ]
- }
- ];
+ const exampleMessages = getModelInputSnippet(model);
  const messages = opts?.messages ?? exampleMessages;
  const messagesStr = stringifyMessages(messages, {
  sep: ",\n ",
@@ -6800,20 +6803,7 @@ query({"inputs": ${getModelInputSnippet(model)}}).then((response) => {
  var snippetTextGeneration2 = (model, accessToken, opts) => {
  if (model.tags.includes("conversational")) {
  const streaming = opts?.streaming ?? true;
- const exampleMessages = model.pipeline_tag === "text-generation" ? [{ role: "user", content: "What is the capital of France?" }] : [
- {
- role: "user",
- content: [
- {
- type: "image_url",
- image_url: {
- url: "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"
- }
- },
- { type: "text", text: "Describe this image in one sentence." }
- ]
- }
- ];
+ const exampleMessages = getModelInputSnippet(model);
  const messages = opts?.messages ?? exampleMessages;
  const messagesStr = stringifyMessages(messages, { sep: ",\n ", start: "[\n ", end: "\n ]" });
  const config = {
package/dist/index.js CHANGED
@@ -757,7 +757,7 @@ var MAPPING_DEFAULT_WIDGET = /* @__PURE__ */ new Map([
  ]);

  // src/pipelines.ts
- var MODALITIES = ["cv", "nlp", "audio", "tabular", "multimodal", "rl", "other"];
+ var MODALITIES = ["multimodal", "nlp", "cv", "audio", "tabular", "rl", "other"];
  var MODALITY_LABELS = {
  multimodal: "Multimodal",
  nlp: "Natural Language Processing",
@@ -5759,6 +5759,12 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  repoUrl: "https://github.com/abetlen/llama-cpp-python",
  snippets: llama_cpp_python
  },
+ "mini-omni2": {
+ prettyLabel: "Mini-Omni2",
+ repoName: "Mini-Omni2",
+ repoUrl: "https://github.com/gpt-omni/mini-omni2",
+ countDownloads: `path:"model_config.yaml"`
+ },
  mindspore: {
  prettyLabel: "MindSpore",
  repoName: "mindspore",
@@ -6212,7 +6218,28 @@ var inputsQuestionAnswering = () => `{
  }`;
  var inputsTextClassification = () => `"I like you. I love you"`;
  var inputsTokenClassification = () => `"My name is Sarah Jessica Parker but you can call me Jessica"`;
- var inputsTextGeneration = () => `"Can you please let us know more details about your "`;
+ var inputsTextGeneration = (model) => {
+ if (model.tags.includes("conversational")) {
+ return model.pipeline_tag === "text-generation" ? [{ role: "user", content: "What is the capital of France?" }] : [
+ {
+ role: "user",
+ content: [
+ {
+ type: "text",
+ text: "Describe this image in one sentence."
+ },
+ {
+ type: "image_url",
+ image_url: {
+ url: "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"
+ }
+ }
+ ]
+ }
+ ];
+ }
+ return `"Can you please let us know more details about your "`;
+ };
  var inputsText2TextGeneration = () => `"The answer to the universe is"`;
  var inputsFillMask = (model) => `"The answer to the universe is ${model.mask_token}."`;
  var inputsSentenceSimilarity = () => `{
@@ -6269,13 +6296,15 @@ function getModelInputSnippet(model, noWrap = false, noQuotes = false) {
  const inputs = modelInputSnippets[model.pipeline_tag];
  if (inputs) {
  let result = inputs(model);
- if (noWrap) {
- result = result.replace(/(?:(?:\r?\n|\r)\t*)|\t+/g, " ");
- }
- if (noQuotes) {
- const REGEX_QUOTES = /^"(.+)"$/s;
- const match = result.match(REGEX_QUOTES);
- result = match ? match[1] : result;
+ if (typeof result === "string") {
+ if (noWrap) {
+ result = result.replace(/(?:(?:\r?\n|\r)\t*)|\t+/g, " ");
+ }
+ if (noQuotes) {
+ const REGEX_QUOTES = /^"(.+)"$/s;
+ const match = result.match(REGEX_QUOTES);
+ result = match ? match[1] : result;
+ }
  }
  return result;
  }
@@ -6338,20 +6367,7 @@ var snippetBasic = (model, accessToken) => ({
  var snippetTextGeneration = (model, accessToken, opts) => {
  if (model.tags.includes("conversational")) {
  const streaming = opts?.streaming ?? true;
- const exampleMessages = model.pipeline_tag === "text-generation" ? [{ role: "user", content: "What is the capital of France?" }] : [
- {
- role: "user",
- content: [
- {
- type: "image_url",
- image_url: {
- url: "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"
- }
- },
- { type: "text", text: "Describe this image in one sentence." }
- ]
- }
- ];
+ const exampleMessages = getModelInputSnippet(model);
  const messages = opts?.messages ?? exampleMessages;
  const config = {
  ...opts?.temperature ? { temperature: opts.temperature } : void 0,
@@ -6451,20 +6467,7 @@ __export(python_exports, {
  });
  var snippetConversational = (model, accessToken, opts) => {
  const streaming = opts?.streaming ?? true;
- const exampleMessages = model.pipeline_tag === "text-generation" ? [{ role: "user", content: "What is the capital of France?" }] : [
- {
- role: "user",
- content: [
- {
- type: "image_url",
- image_url: {
- url: "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"
- }
- },
- { type: "text", text: "Describe this image in one sentence." }
- ]
- }
- ];
+ const exampleMessages = getModelInputSnippet(model);
  const messages = opts?.messages ?? exampleMessages;
  const messagesStr = stringifyMessages(messages, {
  sep: ",\n ",
@@ -6762,20 +6765,7 @@ query({"inputs": ${getModelInputSnippet(model)}}).then((response) => {
  var snippetTextGeneration2 = (model, accessToken, opts) => {
  if (model.tags.includes("conversational")) {
  const streaming = opts?.streaming ?? true;
- const exampleMessages = model.pipeline_tag === "text-generation" ? [{ role: "user", content: "What is the capital of France?" }] : [
- {
- role: "user",
- content: [
- {
- type: "image_url",
- image_url: {
- url: "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg"
- }
- },
- { type: "text", text: "Describe this image in one sentence." }
- ]
- }
- ];
+ const exampleMessages = getModelInputSnippet(model);
  const messages = opts?.messages ?? exampleMessages;
  const messagesStr = stringifyMessages(messages, { sep: ",\n ", start: "[\n ", end: "\n ]" });
  const config = {
@@ -367,6 +367,12 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
  repoUrl: string;
  snippets: (model: ModelData) => string[];
  };
+ "mini-omni2": {
+ prettyLabel: string;
+ repoName: string;
+ repoUrl: string;
+ countDownloads: string;
+ };
  mindspore: {
  prettyLabel: string;
  repoName: string;
@@ -766,6 +772,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
  };
  };
  export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
- export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "depth-pro" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hezar" | "hunyuan-dit" | "imstoucan" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "f5-tts" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "vfi-mamba" | "voicecraft" | "yolov10" | "whisperkit" | "3dtopia-xl")[];
- export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "depth-pro" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hezar" | "hunyuan-dit" | "imstoucan" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "f5-tts" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "vfi-mamba" | "voicecraft" | "yolov10" | "whisperkit" | "3dtopia-xl")[];
+ export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "depth-pro" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hezar" | "hunyuan-dit" | "imstoucan" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "f5-tts" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "vfi-mamba" | "voicecraft" | "yolov10" | "whisperkit" | "3dtopia-xl")[];
+ export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "depth-pro" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hezar" | "hunyuan-dit" | "imstoucan" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "f5-tts" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "vfi-mamba" | "voicecraft" | "yolov10" | "whisperkit" | "3dtopia-xl")[];
  //# sourceMappingURL=model-libraries.d.ts.map
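Because `ModelLibraryKey` is derived as `keyof typeof MODEL_LIBRARIES_UI_ELEMENTS`, the string "mini-omni2" is now part of that union. A minimal TypeScript sketch of what the updated declarations allow, assuming both symbols are re-exported from the package root:

import { MODEL_LIBRARIES_UI_ELEMENTS, type ModelLibraryKey } from "@huggingface/tasks"; // import path is an assumption

// "mini-omni2" now type-checks as a ModelLibraryKey
const key: ModelLibraryKey = "mini-omni2";
const entry = MODEL_LIBRARIES_UI_ELEMENTS[key];
console.log(entry.prettyLabel); // "Mini-Omni2"
console.log(entry.repoUrl); // "https://github.com/gpt-omni/mini-omni2"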
@@ -1 +1 @@
- {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAysBI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,yyCAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,yyCAQ1B,CAAC"}
+ {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA+sBI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,wzCAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,wzCAQ1B,CAAC"}
@@ -1,4 +1,4 @@
- export declare const MODALITIES: readonly ["cv", "nlp", "audio", "tabular", "multimodal", "rl", "other"];
+ export declare const MODALITIES: readonly ["multimodal", "nlp", "cv", "audio", "tabular", "rl", "other"];
  export type Modality = (typeof MODALITIES)[number];
  export declare const MODALITY_LABELS: {
  multimodal: string;
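The tuple literal is reordered so "multimodal" comes first; the derived `Modality` union keeps the same seven members, only the iteration/display order changes. A small sketch, assuming `MODALITIES`, `MODALITY_LABELS`, and `Modality` are re-exported from the package root:

import { MODALITIES, MODALITY_LABELS, type Modality } from "@huggingface/tasks"; // import path is an assumption

const first: Modality = MODALITIES[0]; // "multimodal" as of 0.12.28

// Iterate modalities in the new display order and print their labels.
for (const modality of MODALITIES) {
	console.log(`${modality}: ${MODALITY_LABELS[modality]}`);
}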
@@ -1 +1 @@
- {"version":3,"file":"curl.d.ts","sourceRoot":"","sources":["../../../src/snippets/curl.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,KAAK,EAAE,0BAA0B,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAC;AAG1F,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAErE,eAAO,MAAM,YAAY,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAM1E,CAAC;AAEH,eAAO,MAAM,qBAAqB,UAC1B,gBAAgB,eACV,MAAM,SACZ;IACN,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,0BAA0B,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAClD,UAAU,CAAC,EAAE,oBAAoB,CAAC,YAAY,CAAC,CAAC;IAChD,KAAK,CAAC,EAAE,oBAAoB,CAAC,OAAO,CAAC,CAAC;CACtC,KACC,gBAsDF,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAM3F,CAAC;AAEH,eAAO,MAAM,WAAW,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAKzE,CAAC;AAEH,eAAO,MAAM,YAAY,EAAE,OAAO,CACjC,MAAM,CACL,YAAY,EACZ,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,gBAAgB,CAClG,CA0BD,CAAC;AAEF,wBAAgB,uBAAuB,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,GAAG,gBAAgB,CAItG;AAED,wBAAgB,uBAAuB,CAAC,KAAK,EAAE,IAAI,CAAC,gBAAgB,EAAE,cAAc,CAAC,GAAG,OAAO,CAE9F"}
+ {"version":3,"file":"curl.d.ts","sourceRoot":"","sources":["../../../src/snippets/curl.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,KAAK,EAAE,0BAA0B,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAC;AAG1F,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAErE,eAAO,MAAM,YAAY,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAM1E,CAAC;AAEH,eAAO,MAAM,qBAAqB,UAC1B,gBAAgB,eACV,MAAM,SACZ;IACN,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,0BAA0B,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAClD,UAAU,CAAC,EAAE,oBAAoB,CAAC,YAAY,CAAC,CAAC;IAChD,KAAK,CAAC,EAAE,oBAAoB,CAAC,OAAO,CAAC,CAAC;CACtC,KACC,gBAsCF,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAM3F,CAAC;AAEH,eAAO,MAAM,WAAW,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAKzE,CAAC;AAEH,eAAO,MAAM,YAAY,EAAE,OAAO,CACjC,MAAM,CACL,YAAY,EACZ,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAAK,gBAAgB,CAClG,CA0BD,CAAC;AAEF,wBAAgB,uBAAuB,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,GAAG,gBAAgB,CAItG;AAED,wBAAgB,uBAAuB,CAAC,KAAK,EAAE,IAAI,CAAC,gBAAgB,EAAE,cAAc,CAAC,GAAG,OAAO,CAE9F"}
@@ -1,3 +1,4 @@
+ import type { ChatCompletionInputMessage } from "../tasks";
  import type { ModelDataMinimal } from "./types";
- export declare function getModelInputSnippet(model: ModelDataMinimal, noWrap?: boolean, noQuotes?: boolean): string;
+ export declare function getModelInputSnippet(model: ModelDataMinimal, noWrap?: boolean, noQuotes?: boolean): string | ChatCompletionInputMessage[];
  //# sourceMappingURL=inputs.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"inputs.d.ts","sourceRoot":"","sources":["../../../src/snippets/inputs.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAC;AAqHhD,wBAAgB,oBAAoB,CAAC,KAAK,EAAE,gBAAgB,EAAE,MAAM,UAAQ,EAAE,QAAQ,UAAQ,GAAG,MAAM,CAiBtG"}
+ {"version":3,"file":"inputs.d.ts","sourceRoot":"","sources":["../../../src/snippets/inputs.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,0BAA0B,EAAE,MAAM,UAAU,CAAC;AAC3D,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAC;AA4IhD,wBAAgB,oBAAoB,CACnC,KAAK,EAAE,gBAAgB,EACvB,MAAM,UAAQ,EACd,QAAQ,UAAQ,GACd,MAAM,GAAG,0BAA0B,EAAE,CAmBvC"}
@@ -1 +1 @@
- {"version":3,"file":"js.d.ts","sourceRoot":"","sources":["../../../src/snippets/js.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,KAAK,EAAE,0BAA0B,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAC;AAG1F,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAErE,eAAO,MAAM,YAAY,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAoB1E,CAAC;AAEH,eAAO,MAAM,qBAAqB,UAC1B,gBAAgB,eACV,MAAM,SACZ;IACN,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,0BAA0B,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAClD,UAAU,CAAC,EAAE,oBAAoB,CAAC,YAAY,CAAC,CAAC;IAChD,KAAK,CAAC,EAAE,oBAAoB,CAAC,OAAO,CAAC,CAAC;CACtC,KACC,gBAAgB,GAAG,gBAAgB,EA6HrC,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAsB3F,CAAC;AAEH,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAmBhF,CAAC;AAEH,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAuCjF,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAqBzE,CAAC;AAEH,eAAO,MAAM,UAAU,EAAE,OAAO,CAC/B,MAAM,CACL,YAAY,EACZ,CACC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAC1B,gBAAgB,GAAG,gBAAgB,EAAE,CAC1C,CA0BD,CAAC;AAEF,wBAAgB,qBAAqB,CACpC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,GACjB,gBAAgB,GAAG,gBAAgB,EAAE,CAIvC;AAED,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,gBAAgB,GAAG,OAAO,CAEtE"}
+ {"version":3,"file":"js.d.ts","sourceRoot":"","sources":["../../../src/snippets/js.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,KAAK,EAAE,0BAA0B,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAC;AAG1F,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAErE,eAAO,MAAM,YAAY,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAoB1E,CAAC;AAEH,eAAO,MAAM,qBAAqB,UAC1B,gBAAgB,eACV,MAAM,SACZ;IACN,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,0BAA0B,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAClD,UAAU,CAAC,EAAE,oBAAoB,CAAC,YAAY,CAAC,CAAC;IAChD,KAAK,CAAC,EAAE,oBAAoB,CAAC,OAAO,CAAC,CAAC;CACtC,KACC,gBAAgB,GAAG,gBAAgB,EA6GrC,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAsB3F,CAAC;AAEH,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAmBhF,CAAC;AAEH,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAuCjF,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,gBAAgB,eAAe,MAAM,KAAG,gBAqBzE,CAAC;AAEH,eAAO,MAAM,UAAU,EAAE,OAAO,CAC/B,MAAM,CACL,YAAY,EACZ,CACC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAC1B,gBAAgB,GAAG,gBAAgB,EAAE,CAC1C,CA0BD,CAAC;AAEF,wBAAgB,qBAAqB,CACpC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,GACjB,gBAAgB,GAAG,gBAAgB,EAAE,CAIvC;AAED,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,gBAAgB,GAAG,OAAO,CAEtE"}
@@ -1 +1 @@
- {"version":3,"file":"python.d.ts","sourceRoot":"","sources":["../../../src/snippets/python.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,KAAK,EAAE,0BAA0B,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAC;AAG1F,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAErE,eAAO,MAAM,qBAAqB,UAC1B,gBAAgB,eACV,MAAM,SACZ;IACN,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,0BAA0B,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAClD,UAAU,CAAC,EAAE,oBAAoB,CAAC,YAAY,CAAC,CAAC;IAChD,KAAK,CAAC,EAAE,oBAAoB,CAAC,OAAO,CAAC,CAAC;CACtC,KACC,gBAAgB,EAwHlB,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,KAAG,gBAStE,CAAC;AAEH,eAAO,MAAM,kCAAkC,UAAW,gBAAgB,KAAG,gBAe3E,CAAC;AAEH,eAAO,MAAM,YAAY,UAAW,gBAAgB,KAAG,gBAQrD,CAAC;AAEH,eAAO,MAAM,WAAW,UAAW,gBAAgB,KAAG,gBAQpD,CAAC;AAEH,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,gBAW3D,CAAC;AAEH,eAAO,MAAM,cAAc,UAAW,gBAAgB,KAAG,gBAOvD,CAAC;AAEH,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,gBA+B5D,CAAC;AAEF,eAAO,MAAM,gCAAgC,UAAW,gBAAgB,KAAG,gBAWzE,CAAC;AAEH,eAAO,MAAM,cAAc,EAAE,OAAO,CACnC,MAAM,CACL,YAAY,EACZ,CACC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAC1B,gBAAgB,GAAG,gBAAgB,EAAE,CAC1C,CA8BD,CAAC;AAEF,wBAAgB,yBAAyB,CACxC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAC5B,gBAAgB,GAAG,gBAAgB,EAAE,CAwBvC;AAED,wBAAgB,yBAAyB,CAAC,KAAK,EAAE,gBAAgB,GAAG,OAAO,CAE1E"}
+ {"version":3,"file":"python.d.ts","sourceRoot":"","sources":["../../../src/snippets/python.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AACpD,OAAO,KAAK,EAAE,0BAA0B,EAAE,oBAAoB,EAAE,MAAM,mBAAmB,CAAC;AAG1F,OAAO,KAAK,EAAE,gBAAgB,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAErE,eAAO,MAAM,qBAAqB,UAC1B,gBAAgB,eACV,MAAM,SACZ;IACN,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,QAAQ,CAAC,EAAE,0BAA0B,EAAE,CAAC;IACxC,WAAW,CAAC,EAAE,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAClD,UAAU,CAAC,EAAE,oBAAoB,CAAC,YAAY,CAAC,CAAC;IAChD,KAAK,CAAC,EAAE,oBAAoB,CAAC,OAAO,CAAC,CAAC;CACtC,KACC,gBAAgB,EAwGlB,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,KAAG,gBAStE,CAAC;AAEH,eAAO,MAAM,kCAAkC,UAAW,gBAAgB,KAAG,gBAe3E,CAAC;AAEH,eAAO,MAAM,YAAY,UAAW,gBAAgB,KAAG,gBAQrD,CAAC;AAEH,eAAO,MAAM,WAAW,UAAW,gBAAgB,KAAG,gBAQpD,CAAC;AAEH,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,gBAW3D,CAAC;AAEH,eAAO,MAAM,cAAc,UAAW,gBAAgB,KAAG,gBAOvD,CAAC;AAEH,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,gBA+B5D,CAAC;AAEF,eAAO,MAAM,gCAAgC,UAAW,gBAAgB,KAAG,gBAWzE,CAAC;AAEH,eAAO,MAAM,cAAc,EAAE,OAAO,CACnC,MAAM,CACL,YAAY,EACZ,CACC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,KAC1B,gBAAgB,GAAG,gBAAgB,EAAE,CAC1C,CA8BD,CAAC;AAEF,wBAAgB,yBAAyB,CACxC,KAAK,EAAE,gBAAgB,EACvB,WAAW,EAAE,MAAM,EACnB,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAC5B,gBAAgB,GAAG,gBAAgB,EAAE,CAwBvC;AAED,wBAAgB,yBAAyB,CAAC,KAAK,EAAE,gBAAgB,GAAG,OAAO,CAE1E"}
@@ -1 +1 @@
- {"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/image-segmentation/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cA+Ff,CAAC;AAEF,eAAe,QAAQ,CAAC"}
+ {"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/image-segmentation/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cA8Ff,CAAC;AAEF,eAAe,QAAQ,CAAC"}
@@ -1 +1 @@
- {"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/object-detection/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cAkFf,CAAC;AAEF,eAAe,QAAQ,CAAC"}
+ {"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/object-detection/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cAiFf,CAAC;AAEF,eAAe,QAAQ,CAAC"}
@@ -1 +1 @@
- {"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/text-to-speech/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cAyEf,CAAC;AAEF,eAAe,QAAQ,CAAC"}
+ {"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/text-to-speech/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cA0Ef,CAAC;AAEF,eAAe,QAAQ,CAAC"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@huggingface/tasks",
  "packageManager": "pnpm@8.10.5",
- "version": "0.12.27",
+ "version": "0.12.28",
  "description": "List of ML tasks for huggingface.co/tasks",
  "repository": "https://github.com/huggingface/huggingface.js.git",
  "publishConfig": {
@@ -373,6 +373,12 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
  repoUrl: "https://github.com/abetlen/llama-cpp-python",
  snippets: snippets.llama_cpp_python,
  },
+ "mini-omni2": {
+ prettyLabel: "Mini-Omni2",
+ repoName: "Mini-Omni2",
+ repoUrl: "https://github.com/gpt-omni/mini-omni2",
+ countDownloads: `path:"model_config.yaml"`,
+ },
  mindspore: {
  prettyLabel: "MindSpore",
  repoName: "mindspore",
package/src/pipelines.ts CHANGED
@@ -1,4 +1,4 @@
- export const MODALITIES = ["cv", "nlp", "audio", "tabular", "multimodal", "rl", "other"] as const;
+ export const MODALITIES = ["multimodal", "nlp", "cv", "audio", "tabular", "rl", "other"] as const;

  export type Modality = (typeof MODALITIES)[number];

@@ -26,23 +26,7 @@ export const snippetTextGeneration = (
  if (model.tags.includes("conversational")) {
  // Conversational model detected, so we display a code snippet that features the Messages API
  const streaming = opts?.streaming ?? true;
- const exampleMessages: ChatCompletionInputMessage[] =
- model.pipeline_tag === "text-generation"
- ? [{ role: "user", content: "What is the capital of France?" }]
- : [
- {
- role: "user",
- content: [
- {
- type: "image_url",
- image_url: {
- url: "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg",
- },
- },
- { type: "text", text: "Describe this image in one sentence." },
- ],
- },
- ];
+ const exampleMessages = getModelInputSnippet(model) as ChatCompletionInputMessage[];
  const messages = opts?.messages ?? exampleMessages;

  const config = {
@@ -1,4 +1,5 @@
  import type { PipelineType } from "../pipelines";
+ import type { ChatCompletionInputMessage } from "../tasks";
  import type { ModelDataMinimal } from "./types";

  const inputsZeroShotClassification = () =>
@@ -40,7 +41,30 @@ const inputsTextClassification = () => `"I like you. I love you"`;

  const inputsTokenClassification = () => `"My name is Sarah Jessica Parker but you can call me Jessica"`;

- const inputsTextGeneration = () => `"Can you please let us know more details about your "`;
+ const inputsTextGeneration = (model: ModelDataMinimal): string | ChatCompletionInputMessage[] => {
+ if (model.tags.includes("conversational")) {
+ return model.pipeline_tag === "text-generation"
+ ? [{ role: "user", content: "What is the capital of France?" }]
+ : [
+ {
+ role: "user",
+ content: [
+ {
+ type: "text",
+ text: "Describe this image in one sentence.",
+ },
+ {
+ type: "image_url",
+ image_url: {
+ url: "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg",
+ },
+ },
+ ],
+ },
+ ];
+ }
+ return `"Can you please let us know more details about your "`;
+ };

  const inputsText2TextGeneration = () => `"The answer to the universe is"`;

@@ -84,7 +108,7 @@ const inputsTabularPrediction = () =>
  const inputsZeroShotImageClassification = () => `"cats.jpg"`;

  const modelInputSnippets: {
- [key in PipelineType]?: (model: ModelDataMinimal) => string;
+ [key in PipelineType]?: (model: ModelDataMinimal) => string | ChatCompletionInputMessage[];
  } = {
  "audio-to-audio": inputsAudioToAudio,
  "audio-classification": inputsAudioClassification,
@@ -116,18 +140,24 @@ const modelInputSnippets: {

  // Use noWrap to put the whole snippet on a single line (removing new lines and tabulations)
  // Use noQuotes to strip quotes from start & end (example: "abc" -> abc)
- export function getModelInputSnippet(model: ModelDataMinimal, noWrap = false, noQuotes = false): string {
+ export function getModelInputSnippet(
+ model: ModelDataMinimal,
+ noWrap = false,
+ noQuotes = false
+ ): string | ChatCompletionInputMessage[] {
  if (model.pipeline_tag) {
  const inputs = modelInputSnippets[model.pipeline_tag];
  if (inputs) {
  let result = inputs(model);
- if (noWrap) {
- result = result.replace(/(?:(?:\r?\n|\r)\t*)|\t+/g, " ");
- }
- if (noQuotes) {
- const REGEX_QUOTES = /^"(.+)"$/s;
- const match = result.match(REGEX_QUOTES);
- result = match ? match[1] : result;
+ if (typeof result === "string") {
+ if (noWrap) {
+ result = result.replace(/(?:(?:\r?\n|\r)\t*)|\t+/g, " ");
+ }
+ if (noQuotes) {
+ const REGEX_QUOTES = /^"(.+)"$/s;
+ const match = result.match(REGEX_QUOTES);
+ result = match ? match[1] : result;
+ }
  }
  return result;
  }
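With this change `getModelInputSnippet` returns either a plain string (non-conversational pipelines) or `ChatCompletionInputMessage[]` (conversational models), so callers have to narrow the union. A minimal sketch, assuming the function and both types are re-exported from the package root:

import { getModelInputSnippet } from "@huggingface/tasks"; // import paths are assumptions
import type { ChatCompletionInputMessage, ModelDataMinimal } from "@huggingface/tasks";

function logExampleInput(model: ModelDataMinimal): void {
	const snippet = getModelInputSnippet(model);
	if (typeof snippet === "string") {
		// Non-conversational pipelines still return a ready-to-embed string.
		console.log(snippet);
	} else {
		// Conversational models now return an array of chat messages.
		const messages: ChatCompletionInputMessage[] = snippet;
		console.log(messages[0]?.role);
	}
}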
@@ -40,23 +40,7 @@ export const snippetTextGeneration = (
  if (model.tags.includes("conversational")) {
  // Conversational model detected, so we display a code snippet that features the Messages API
  const streaming = opts?.streaming ?? true;
- const exampleMessages: ChatCompletionInputMessage[] =
- model.pipeline_tag === "text-generation"
- ? [{ role: "user", content: "What is the capital of France?" }]
- : [
- {
- role: "user",
- content: [
- {
- type: "image_url",
- image_url: {
- url: "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg",
- },
- },
- { type: "text", text: "Describe this image in one sentence." },
- ],
- },
- ];
+ const exampleMessages = getModelInputSnippet(model) as ChatCompletionInputMessage[];
  const messages = opts?.messages ?? exampleMessages;
  const messagesStr = stringifyMessages(messages, { sep: ",\n\t\t", start: "[\n\t\t", end: "\n\t]" });

@@ -16,23 +16,7 @@ export const snippetConversational = (
  }
  ): InferenceSnippet[] => {
  const streaming = opts?.streaming ?? true;
- const exampleMessages: ChatCompletionInputMessage[] =
- model.pipeline_tag === "text-generation"
- ? [{ role: "user", content: "What is the capital of France?" }]
- : [
- {
- role: "user",
- content: [
- {
- type: "image_url",
- image_url: {
- url: "https://cdn.britannica.com/61/93061-050-99147DCE/Statue-of-Liberty-Island-New-York-Bay.jpg",
- },
- },
- { type: "text", text: "Describe this image in one sentence." },
- ],
- },
- ];
+ const exampleMessages = getModelInputSnippet(model) as ChatCompletionInputMessage[];
  const messages = opts?.messages ?? exampleMessages;
  const messagesStr = stringifyMessages(messages, {
  sep: ",\n\t",
@@ -44,8 +44,7 @@ const taskData: TaskDataCustom = {
  models: [
  {
  // TO DO: write description
- description:
- "Solid semantic segmentation model trained on ADE20k.",
+ description: "Solid semantic segmentation model trained on ADE20k.",
  id: "openmmlab/upernet-convnext-small",
  },
  {
@@ -51,8 +51,7 @@ const taskData: TaskDataCustom = {
  id: "jameslahm/yolov10x",
  },
  {
- description:
- "Fast and accurate object detection model trained on COCO and Object365 datasets.",
+ description: "Fast and accurate object detection model trained on COCO and Object365 datasets.",
  id: "PekingU/rtdetr_r18vd_coco_o365",
  },
  ],
@@ -57,7 +57,8 @@ const taskData: TaskDataCustom = {
  id: "suno/bark",
  },
  {
- description: "An application on XTTS, a voice generation model that lets you clone voices into different languages.",
+ description:
+ "An application on XTTS, a voice generation model that lets you clone voices into different languages.",
  id: "coqui/xtts",
  },
  {