@huggingface/tasks 0.11.8 → 0.11.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -4652,6 +4652,11 @@ var pyannote_audio = (model) => {
4652
4652
  }
4653
4653
  return pyannote_audio_model(model);
4654
4654
  };
4655
+ var relik = (model) => [
4656
+ `from relik import Relik
4657
+
4658
+ relik = Relik.from_pretrained("${model.id}")`
4659
+ ];
4655
4660
  var tensorflowttsTextToMel = (model) => [
4656
4661
  `from tensorflow_tts.inference import AutoProcessor, TFAutoModel
4657
4662
 
@@ -4784,6 +4789,33 @@ var fastai = (model) => [
4784
4789
 
4785
4790
  learn = from_pretrained_fastai("${model.id}")`
4786
4791
  ];
4792
+ var sam2 = (model) => {
4793
+ const image_predictor = `# Use SAM2 with images
4794
+ import torch
4795
+ from sam2.sam2_image_predictor import SAM2ImagePredictor
4796
+
4797
+ predictor = SAM2ImagePredictor.from_pretrained("${model.id}")
4798
+
4799
+ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
4800
+ predictor.set_image(<your_image>)
4801
+ masks, _, _ = predictor.predict(<input_prompts>)`;
4802
+ const video_predictor = `# Use SAM2 with videos
4803
+ import torch
4804
+ from sam2.sam2_video_predictor import SAM2VideoPredictor
4805
+
4806
+ predictor = SAM2VideoPredictor.from_pretrained("${model.id}")
4807
+
4808
+ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
4809
+ state = predictor.init_state(<your_video>)
4810
+
4811
+ # add new prompts and instantly get the output on the same frame
4812
+ frame_idx, object_ids, masks = predictor.add_new_points(state, <your_prompts>)
4813
+
4814
+ # propagate the prompts to get masklets throughout the video
4815
+ for frame_idx, object_ids, masks in predictor.propagate_in_video(state):
4816
+ ...`;
4817
+ return [image_predictor, video_predictor];
4818
+ };
4787
4819
  var sampleFactory = (model) => [
4788
4820
  `python -m sample_factory.huggingface.load_from_hub -r ${model.id} -d ./train_dir`
4789
4821
  ];
@@ -5450,6 +5482,21 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
5450
5482
  filter: false,
5451
5483
  countDownloads: `path:"tokenizer.model"`
5452
5484
  },
5485
+ relik: {
5486
+ prettyLabel: "Relik",
5487
+ repoName: "Relik",
5488
+ repoUrl: "https://github.com/SapienzaNLP/relik",
5489
+ snippets: relik,
5490
+ filter: false
5491
+ },
5492
+ refiners: {
5493
+ prettyLabel: "Refiners",
5494
+ repoName: "Refiners",
5495
+ repoUrl: "https://github.com/finegrain-ai/refiners",
5496
+ docsUrl: "https://refine.rs/",
5497
+ filter: false,
5498
+ countDownloads: `path:"model.safetensors"`
5499
+ },
5453
5500
  saelens: {
5454
5501
  prettyLabel: "SAELens",
5455
5502
  repoName: "SAELens",
@@ -5462,6 +5509,7 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
5462
5509
  repoName: "sam2",
5463
5510
  repoUrl: "https://github.com/facebookresearch/segment-anything-2",
5464
5511
  filter: false,
5512
+ snippets: sam2,
5465
5513
  countDownloads: `path_extension:"pt"`
5466
5514
  },
5467
5515
  "sample-factory": {
@@ -6605,6 +6653,27 @@ var snippetLlamacpp = (model, filepath) => {
6605
6653
  }
6606
6654
  ];
6607
6655
  };
6656
+ var snippetLocalAI = (model, filepath) => {
6657
+ const command = (binary) => ["# Load and run the model:", `${binary} huggingface://${model.id}/${filepath ?? "{{GGUF_FILE}}"}`].join("\n");
6658
+ return [
6659
+ {
6660
+ title: "Install from binary",
6661
+ setup: "curl https://localai.io/install.sh | sh",
6662
+ content: command("local-ai run")
6663
+ },
6664
+ {
6665
+ title: "Use Docker images",
6666
+ setup: [
6667
+ // prettier-ignore
6668
+ "# Pull the image:",
6669
+ "docker pull localai/localai:latest-cpu"
6670
+ ].join("\n"),
6671
+ content: command(
6672
+ "docker run -p 8080:8080 --name localai -v $PWD/models:/build/models localai/localai:latest-cpu"
6673
+ )
6674
+ }
6675
+ ];
6676
+ };
6608
6677
  var LOCAL_APPS = {
6609
6678
  "llama.cpp": {
6610
6679
  prettyLabel: "llama.cpp",
@@ -6620,6 +6689,13 @@ var LOCAL_APPS = {
6620
6689
  displayOnModelPage: isGgufModel,
6621
6690
  deeplink: (model, filepath) => new URL(`lmstudio://open_from_hf?model=${model.id}${filepath ? `&file=${filepath}` : ""}`)
6622
6691
  },
6692
+ localai: {
6693
+ prettyLabel: "LocalAI",
6694
+ docsUrl: "https://github.com/mudler/LocalAI",
6695
+ mainTask: "text-generation",
6696
+ displayOnModelPage: isGgufModel,
6697
+ snippet: snippetLocalAI
6698
+ },
6623
6699
  jan: {
6624
6700
  prettyLabel: "Jan",
6625
6701
  docsUrl: "https://jan.ai",
@@ -6692,6 +6768,21 @@ var LOCAL_APPS = {
6692
6768
  macOSOnly: true,
6693
6769
  displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
6694
6770
  deeplink: (model) => new URL(`diffusionbee://open_from_hf?model=${model.id}`)
6771
+ },
6772
+ joyfusion: {
6773
+ prettyLabel: "JoyFusion",
6774
+ docsUrl: "https://joyfusion.app",
6775
+ mainTask: "text-to-image",
6776
+ macOSOnly: true,
6777
+ displayOnModelPage: (model) => model.tags.includes("coreml") && model.tags.includes("joyfusion") && model.pipeline_tag === "text-to-image",
6778
+ deeplink: (model) => new URL(`https://joyfusion.app/import_from_hf?repo_id=${model.id}`)
6779
+ },
6780
+ invoke: {
6781
+ prettyLabel: "Invoke",
6782
+ docsUrl: "https://github.com/invoke-ai/InvokeAI",
6783
+ mainTask: "text-to-image",
6784
+ displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
6785
+ deeplink: (model) => new URL(`https://models.invoke.ai/huggingface/${model.id}`)
6695
6786
  }
6696
6787
  };
6697
6788
 
package/dist/index.js CHANGED
@@ -4614,6 +4614,11 @@ var pyannote_audio = (model) => {
4614
4614
  }
4615
4615
  return pyannote_audio_model(model);
4616
4616
  };
4617
+ var relik = (model) => [
4618
+ `from relik import Relik
4619
+
4620
+ relik = Relik.from_pretrained("${model.id}")`
4621
+ ];
4617
4622
  var tensorflowttsTextToMel = (model) => [
4618
4623
  `from tensorflow_tts.inference import AutoProcessor, TFAutoModel
4619
4624
 
@@ -4746,6 +4751,33 @@ var fastai = (model) => [
4746
4751
 
4747
4752
  learn = from_pretrained_fastai("${model.id}")`
4748
4753
  ];
4754
+ var sam2 = (model) => {
4755
+ const image_predictor = `# Use SAM2 with images
4756
+ import torch
4757
+ from sam2.sam2_image_predictor import SAM2ImagePredictor
4758
+
4759
+ predictor = SAM2ImagePredictor.from_pretrained("${model.id}")
4760
+
4761
+ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
4762
+ predictor.set_image(<your_image>)
4763
+ masks, _, _ = predictor.predict(<input_prompts>)`;
4764
+ const video_predictor = `# Use SAM2 with videos
4765
+ import torch
4766
+ from sam2.sam2_video_predictor import SAM2VideoPredictor
4767
+
4768
+ predictor = SAM2VideoPredictor.from_pretrained("${model.id}")
4769
+
4770
+ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
4771
+ state = predictor.init_state(<your_video>)
4772
+
4773
+ # add new prompts and instantly get the output on the same frame
4774
+ frame_idx, object_ids, masks = predictor.add_new_points(state, <your_prompts>)
4775
+
4776
+ # propagate the prompts to get masklets throughout the video
4777
+ for frame_idx, object_ids, masks in predictor.propagate_in_video(state):
4778
+ ...`;
4779
+ return [image_predictor, video_predictor];
4780
+ };
4749
4781
  var sampleFactory = (model) => [
4750
4782
  `python -m sample_factory.huggingface.load_from_hub -r ${model.id} -d ./train_dir`
4751
4783
  ];
@@ -5412,6 +5444,21 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
5412
5444
  filter: false,
5413
5445
  countDownloads: `path:"tokenizer.model"`
5414
5446
  },
5447
+ relik: {
5448
+ prettyLabel: "Relik",
5449
+ repoName: "Relik",
5450
+ repoUrl: "https://github.com/SapienzaNLP/relik",
5451
+ snippets: relik,
5452
+ filter: false
5453
+ },
5454
+ refiners: {
5455
+ prettyLabel: "Refiners",
5456
+ repoName: "Refiners",
5457
+ repoUrl: "https://github.com/finegrain-ai/refiners",
5458
+ docsUrl: "https://refine.rs/",
5459
+ filter: false,
5460
+ countDownloads: `path:"model.safetensors"`
5461
+ },
5415
5462
  saelens: {
5416
5463
  prettyLabel: "SAELens",
5417
5464
  repoName: "SAELens",
@@ -5424,6 +5471,7 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
5424
5471
  repoName: "sam2",
5425
5472
  repoUrl: "https://github.com/facebookresearch/segment-anything-2",
5426
5473
  filter: false,
5474
+ snippets: sam2,
5427
5475
  countDownloads: `path_extension:"pt"`
5428
5476
  },
5429
5477
  "sample-factory": {
@@ -6567,6 +6615,27 @@ var snippetLlamacpp = (model, filepath) => {
6567
6615
  }
6568
6616
  ];
6569
6617
  };
6618
+ var snippetLocalAI = (model, filepath) => {
6619
+ const command = (binary) => ["# Load and run the model:", `${binary} huggingface://${model.id}/${filepath ?? "{{GGUF_FILE}}"}`].join("\n");
6620
+ return [
6621
+ {
6622
+ title: "Install from binary",
6623
+ setup: "curl https://localai.io/install.sh | sh",
6624
+ content: command("local-ai run")
6625
+ },
6626
+ {
6627
+ title: "Use Docker images",
6628
+ setup: [
6629
+ // prettier-ignore
6630
+ "# Pull the image:",
6631
+ "docker pull localai/localai:latest-cpu"
6632
+ ].join("\n"),
6633
+ content: command(
6634
+ "docker run -p 8080:8080 --name localai -v $PWD/models:/build/models localai/localai:latest-cpu"
6635
+ )
6636
+ }
6637
+ ];
6638
+ };
6570
6639
  var LOCAL_APPS = {
6571
6640
  "llama.cpp": {
6572
6641
  prettyLabel: "llama.cpp",
@@ -6582,6 +6651,13 @@ var LOCAL_APPS = {
6582
6651
  displayOnModelPage: isGgufModel,
6583
6652
  deeplink: (model, filepath) => new URL(`lmstudio://open_from_hf?model=${model.id}${filepath ? `&file=${filepath}` : ""}`)
6584
6653
  },
6654
+ localai: {
6655
+ prettyLabel: "LocalAI",
6656
+ docsUrl: "https://github.com/mudler/LocalAI",
6657
+ mainTask: "text-generation",
6658
+ displayOnModelPage: isGgufModel,
6659
+ snippet: snippetLocalAI
6660
+ },
6585
6661
  jan: {
6586
6662
  prettyLabel: "Jan",
6587
6663
  docsUrl: "https://jan.ai",
@@ -6654,6 +6730,21 @@ var LOCAL_APPS = {
6654
6730
  macOSOnly: true,
6655
6731
  displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
6656
6732
  deeplink: (model) => new URL(`diffusionbee://open_from_hf?model=${model.id}`)
6733
+ },
6734
+ joyfusion: {
6735
+ prettyLabel: "JoyFusion",
6736
+ docsUrl: "https://joyfusion.app",
6737
+ mainTask: "text-to-image",
6738
+ macOSOnly: true,
6739
+ displayOnModelPage: (model) => model.tags.includes("coreml") && model.tags.includes("joyfusion") && model.pipeline_tag === "text-to-image",
6740
+ deeplink: (model) => new URL(`https://joyfusion.app/import_from_hf?repo_id=${model.id}`)
6741
+ },
6742
+ invoke: {
6743
+ prettyLabel: "Invoke",
6744
+ docsUrl: "https://github.com/invoke-ai/InvokeAI",
6745
+ mainTask: "text-to-image",
6746
+ displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
6747
+ deeplink: (model) => new URL(`https://models.invoke.ai/huggingface/${model.id}`)
6657
6748
  }
6658
6749
  };
6659
6750
 
@@ -78,6 +78,13 @@ export declare const LOCAL_APPS: {
78
78
  displayOnModelPage: typeof isGgufModel;
79
79
  deeplink: (model: ModelData, filepath: string | undefined) => URL;
80
80
  };
81
+ localai: {
82
+ prettyLabel: string;
83
+ docsUrl: string;
84
+ mainTask: "text-generation";
85
+ displayOnModelPage: typeof isGgufModel;
86
+ snippet: (model: ModelData, filepath?: string) => LocalAppSnippet[];
87
+ };
81
88
  jan: {
82
89
  prettyLabel: string;
83
90
  docsUrl: string;
@@ -137,6 +144,21 @@ export declare const LOCAL_APPS: {
137
144
  displayOnModelPage: (model: ModelData) => boolean;
138
145
  deeplink: (model: ModelData) => URL;
139
146
  };
147
+ joyfusion: {
148
+ prettyLabel: string;
149
+ docsUrl: string;
150
+ mainTask: "text-to-image";
151
+ macOSOnly: true;
152
+ displayOnModelPage: (model: ModelData) => boolean;
153
+ deeplink: (model: ModelData) => URL;
154
+ };
155
+ invoke: {
156
+ prettyLabel: string;
157
+ docsUrl: string;
158
+ mainTask: "text-to-image";
159
+ displayOnModelPage: (model: ModelData) => boolean;
160
+ deeplink: (model: ModelData) => URL;
161
+ };
140
162
  };
141
163
  export type LocalAppKey = keyof typeof LOCAL_APPS;
142
164
  export {};
@@ -1 +1 @@
1
- {"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD,MAAM,WAAW,eAAe;IAC/B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,GAAG,CAAC;CACtD,GACD;IACA;;;OAGG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,MAAM,GAAG,MAAM,EAAE,GAAG,eAAe,GAAG,eAAe,EAAE,CAAC;CACzG,CACH,CAAC;AAEF,iBAAS,WAAW,CAAC,KAAK,EAAE,SAAS,WAEpC;AAuCD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;yBAhDS,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8I5C,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,MAAM,OAAO,UAAU,CAAC"}
1
+ {"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD,MAAM,WAAW,eAAe;IAC/B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,GAAG,CAAC;CACtD,GACD;IACA;;;OAGG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,MAAM,GAAG,MAAM,EAAE,GAAG,eAAe,GAAG,eAAe,EAAE,CAAC;CACzG,CACH,CAAC;AAEF,iBAAS,WAAW,CAAC,KAAK,EAAE,SAAS,WAEpC;AA8DD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;yBAvES,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;;;;;;;;;yBAqCjD,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAsJ3C,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,MAAM,OAAO,UAAU,CAAC"}
@@ -25,12 +25,14 @@ export declare const open_clip: (model: ModelData) => string[];
25
25
  export declare const paddlenlp: (model: ModelData) => string[];
26
26
  export declare const pyannote_audio_pipeline: (model: ModelData) => string[];
27
27
  export declare const pyannote_audio: (model: ModelData) => string[];
28
+ export declare const relik: (model: ModelData) => string[];
28
29
  export declare const tensorflowtts: (model: ModelData) => string[];
29
30
  export declare const timm: (model: ModelData) => string[];
30
31
  export declare const saelens: () => string[];
31
32
  export declare const sklearn: (model: ModelData) => string[];
32
33
  export declare const stable_audio_tools: (model: ModelData) => string[];
33
34
  export declare const fastai: (model: ModelData) => string[];
35
+ export declare const sam2: (model: ModelData) => string[];
34
36
  export declare const sampleFactory: (model: ModelData) => string[];
35
37
  export declare const sentenceTransformers: (model: ModelData) => string[];
36
38
  export declare const setfit: (model: ModelData) => string[];
@@ -1 +1 @@
1
- {"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAY9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAMF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,iBAAiB,UAAW,SAAS,KAAG,MAAM,EA6C1D,CAAC;AA+BF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAS9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AAEF,eAAO,MAAM,oBAAoB,UAAW,SAAS,KAAG,MAAM,EAI7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UA
AW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EA4CrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAYjD,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC"}
1
+ {"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAY9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAMF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,iBAAiB,UAAW,SAAS,KAAG,MAAM,EA6C1D,CAAC;AA+BF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAS9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EA2B7C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AAEF,eAAO,MAAM,oBAAoB,U
AAW,SAAS,KAAG,MAAM,EAI7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EA4CrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAYjD,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC"}
@@ -429,6 +429,21 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
429
429
  filter: false;
430
430
  countDownloads: string;
431
431
  };
432
+ relik: {
433
+ prettyLabel: string;
434
+ repoName: string;
435
+ repoUrl: string;
436
+ snippets: (model: ModelData) => string[];
437
+ filter: false;
438
+ };
439
+ refiners: {
440
+ prettyLabel: string;
441
+ repoName: string;
442
+ repoUrl: string;
443
+ docsUrl: string;
444
+ filter: false;
445
+ countDownloads: string;
446
+ };
432
447
  saelens: {
433
448
  prettyLabel: string;
434
449
  repoName: string;
@@ -441,6 +456,7 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
441
456
  repoName: string;
442
457
  repoUrl: string;
443
458
  filter: false;
459
+ snippets: (model: ModelData) => string[];
444
460
  countDownloads: string;
445
461
  };
446
462
  "sample-factory": {
@@ -605,6 +621,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
605
621
  };
606
622
  };
607
623
  export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
608
- export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
609
- export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
624
+ export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
625
+ export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
610
626
  //# sourceMappingURL=model-libraries.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAwiBI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,o/BAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,o/BAQ1B,CAAC"}
1
+ {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAwjBI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,2gCAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,2gCAQ1B,CAAC"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@huggingface/tasks",
3
3
  "packageManager": "pnpm@8.10.5",
4
- "version": "0.11.8",
4
+ "version": "0.11.10",
5
5
  "description": "List of ML tasks for huggingface.co/tasks",
6
6
  "repository": "https://github.com/huggingface/huggingface.js.git",
7
7
  "publishConfig": {
package/src/local-apps.ts CHANGED
@@ -99,6 +99,29 @@ const snippetLlamacpp = (model: ModelData, filepath?: string): LocalAppSnippet[]
99
99
  ];
100
100
  };
101
101
 
102
+ const snippetLocalAI = (model: ModelData, filepath?: string): LocalAppSnippet[] => {
103
+ const command = (binary: string) =>
104
+ ["# Load and run the model:", `${binary} huggingface://${model.id}/${filepath ?? "{{GGUF_FILE}}"}`].join("\n");
105
+ return [
106
+ {
107
+ title: "Install from binary",
108
+ setup: "curl https://localai.io/install.sh | sh",
109
+ content: command("local-ai run"),
110
+ },
111
+ {
112
+ title: "Use Docker images",
113
+ setup: [
114
+ // prettier-ignore
115
+ "# Pull the image:",
116
+ "docker pull localai/localai:latest-cpu",
117
+ ].join("\n"),
118
+ content: command(
119
+ "docker run -p 8080:8080 --name localai -v $PWD/models:/build/models localai/localai:latest-cpu"
120
+ ),
121
+ },
122
+ ];
123
+ };
124
+
102
125
  /**
103
126
  * Add your new local app here.
104
127
  *
@@ -126,6 +149,13 @@ export const LOCAL_APPS = {
126
149
  deeplink: (model, filepath) =>
127
150
  new URL(`lmstudio://open_from_hf?model=${model.id}${filepath ? `&file=${filepath}` : ""}`),
128
151
  },
152
+ localai: {
153
+ prettyLabel: "LocalAI",
154
+ docsUrl: "https://github.com/mudler/LocalAI",
155
+ mainTask: "text-generation",
156
+ displayOnModelPage: isGgufModel,
157
+ snippet: snippetLocalAI,
158
+ },
129
159
  jan: {
130
160
  prettyLabel: "Jan",
131
161
  docsUrl: "https://jan.ai",
@@ -204,6 +234,21 @@ export const LOCAL_APPS = {
204
234
  displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
205
235
  deeplink: (model) => new URL(`diffusionbee://open_from_hf?model=${model.id}`),
206
236
  },
237
+ joyfusion: {
238
+ prettyLabel: "JoyFusion",
239
+ docsUrl: "https://joyfusion.app",
240
+ mainTask: "text-to-image",
241
+ macOSOnly: true,
242
+ displayOnModelPage: (model) => model.tags.includes("coreml") && model.tags.includes("joyfusion") && model.pipeline_tag === "text-to-image",
243
+ deeplink: (model) => new URL(`https://joyfusion.app/import_from_hf?repo_id=${model.id}`),
244
+ },
245
+ invoke: {
246
+ prettyLabel: "Invoke",
247
+ docsUrl: "https://github.com/invoke-ai/InvokeAI",
248
+ mainTask: "text-to-image",
249
+ displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
250
+ deeplink: (model) => new URL(`https://models.invoke.ai/huggingface/${model.id}`),
251
+ },
207
252
  } satisfies Record<string, LocalApp>;
208
253
 
209
254
  export type LocalAppKey = keyof typeof LOCAL_APPS;
@@ -381,6 +381,12 @@ export const pyannote_audio = (model: ModelData): string[] => {
381
381
  return pyannote_audio_model(model);
382
382
  };
383
383
 
384
+ export const relik = (model: ModelData): string[] => [
385
+ `from relik import Relik
386
+
387
+ relik = Relik.from_pretrained("${model.id}")`,
388
+ ];
389
+
384
390
  const tensorflowttsTextToMel = (model: ModelData): string[] => [
385
391
  `from tensorflow_tts.inference import AutoProcessor, TFAutoModel
386
392
 
@@ -525,6 +531,35 @@ export const fastai = (model: ModelData): string[] => [
525
531
  learn = from_pretrained_fastai("${model.id}")`,
526
532
  ];
527
533
 
534
+ export const sam2 = (model: ModelData): string[] => {
535
+ const image_predictor = `# Use SAM2 with images
536
+ import torch
537
+ from sam2.sam2_image_predictor import SAM2ImagePredictor
538
+
539
+ predictor = SAM2ImagePredictor.from_pretrained("${model.id}")
540
+
541
+ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
542
+ predictor.set_image(<your_image>)
543
+ masks, _, _ = predictor.predict(<input_prompts>)`;
544
+
545
+ const video_predictor = `# Use SAM2 with videos
546
+ import torch
547
+ from sam2.sam2_video_predictor import SAM2VideoPredictor
548
+
549
+ predictor = SAM2VideoPredictor.from_pretrained("${model.id}")
550
+
551
+ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
552
+ state = predictor.init_state(<your_video>)
553
+
554
+ # add new prompts and instantly get the output on the same frame
555
+ frame_idx, object_ids, masks = predictor.add_new_points(state, <your_prompts>)
556
+
557
+ # propagate the prompts to get masklets throughout the video
558
+ for frame_idx, object_ids, masks in predictor.propagate_in_video(state):
559
+ ...`;
560
+ return [image_predictor, video_predictor];
561
+ };
562
+
528
563
  export const sampleFactory = (model: ModelData): string[] => [
529
564
  `python -m sample_factory.huggingface.load_from_hub -r ${model.id} -d ./train_dir`,
530
565
  ];
@@ -435,6 +435,21 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
435
435
  filter: false,
436
436
  countDownloads: `path:"tokenizer.model"`,
437
437
  },
438
+ relik: {
439
+ prettyLabel: "Relik",
440
+ repoName: "Relik",
441
+ repoUrl: "https://github.com/SapienzaNLP/relik",
442
+ snippets: snippets.relik,
443
+ filter: false,
444
+ },
445
+ refiners: {
446
+ prettyLabel: "Refiners",
447
+ repoName: "Refiners",
448
+ repoUrl: "https://github.com/finegrain-ai/refiners",
449
+ docsUrl: "https://refine.rs/",
450
+ filter: false,
451
+ countDownloads: `path:"model.safetensors"`,
452
+ },
438
453
  saelens: {
439
454
  prettyLabel: "SAELens",
440
455
  repoName: "SAELens",
@@ -447,6 +462,7 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
447
462
  repoName: "sam2",
448
463
  repoUrl: "https://github.com/facebookresearch/segment-anything-2",
449
464
  filter: false,
465
+ snippets: snippets.sam2,
450
466
  countDownloads: `path_extension:"pt"`,
451
467
  },
452
468
  "sample-factory": {