@huggingface/tasks 0.11.8 → 0.11.9

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
package/dist/index.cjs CHANGED
@@ -4784,6 +4784,33 @@ var fastai = (model) => [
 
  learn = from_pretrained_fastai("${model.id}")`
  ];
+ var sam2 = (model) => {
+ const image_predictor = `# Use SAM2 with images
+ import torch
+ from sam2.sam2_image_predictor import SAM2ImagePredictor
+
+ predictor = SAM2ImagePredictor.from_pretrained(${model.id})
+
+ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
+ predictor.set_image(<your_image>)
+ masks, _, _ = predictor.predict(<input_prompts>)`;
+ const video_predictor = `# Use SAM2 with videos
+ import torch
+ from sam2.sam2_video_predictor import SAM2VideoPredictor
+
+ predictor = SAM2VideoPredictor.from_pretrained(${model.id})
+
+ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
+ state = predictor.init_state(<your_video>)
+
+ # add new prompts and instantly get the output on the same frame
+ frame_idx, object_ids, masks = predictor.add_new_points(state, <your_prompts>):
+
+ # propagate the prompts to get masklets throughout the video
+ for frame_idx, object_ids, masks in predictor.propagate_in_video(state):
+ ...`;
+ return [image_predictor, video_predictor];
+ };
  var sampleFactory = (model) => [
  `python -m sample_factory.huggingface.load_from_hub -r ${model.id} -d ./train_dir`
  ];
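To see what one of these template strings expands to for an end user, here is a minimal standalone sketch. It re-states the image-predictor template above purely for illustration (it is not the package API); the `<your_image>` and `<input_prompts>` placeholders are left for the reader to fill in, and the repo id is interpolated into `from_pretrained(...)` exactly as the template writes it.

// Illustrative re-statement of the bundled image-predictor template (not the package API).
const sam2ImageSnippet = (repoId: string): string => `# Use SAM2 with images
import torch
from sam2.sam2_image_predictor import SAM2ImagePredictor

predictor = SAM2ImagePredictor.from_pretrained(${repoId})

with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
    predictor.set_image(<your_image>)
    masks, _, _ = predictor.predict(<input_prompts>)`;

// Example repo id, used only to show the interpolation.
console.log(sam2ImageSnippet("facebook/sam2-hiera-large"));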
@@ -5450,6 +5477,14 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  filter: false,
  countDownloads: `path:"tokenizer.model"`
  },
+ refiners: {
+ prettyLabel: "Refiners",
+ repoName: "Refiners",
+ repoUrl: "https://github.com/finegrain-ai/refiners",
+ docsUrl: "https://refine.rs/",
+ filter: false,
+ countDownloads: `path:"model.safetensors"`
+ },
  saelens: {
  prettyLabel: "SAELens",
  repoName: "SAELens",
@@ -5462,6 +5497,7 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  repoName: "sam2",
  repoUrl: "https://github.com/facebookresearch/segment-anything-2",
  filter: false,
+ snippets: sam2,
  countDownloads: `path_extension:"pt"`
  },
  "sample-factory": {
@@ -6605,6 +6641,27 @@ var snippetLlamacpp = (model, filepath) => {
  }
  ];
  };
+ var snippetLocalAI = (model, filepath) => {
+ const command = (binary) => ["# Load and run the model:", `${binary} huggingface://${model.id}/${filepath ?? "{{GGUF_FILE}}"}`].join("\n");
+ return [
+ {
+ title: "Install from binary",
+ setup: "curl https://localai.io/install.sh | sh",
+ content: command("local-ai run")
+ },
+ {
+ title: "Use Docker images",
+ setup: [
+ // prettier-ignore
+ "# Pull the image:",
+ "docker pull localai/localai:latest-cpu"
+ ].join("\n"),
+ content: command(
+ "docker run -p 8080:8080 --name localai -v $PWD/models:/build/models localai/localai:latest-cpu"
+ )
+ }
+ ];
+ };
  var LOCAL_APPS = {
  "llama.cpp": {
  prettyLabel: "llama.cpp",
@@ -6620,6 +6677,13 @@ var LOCAL_APPS = {
  displayOnModelPage: isGgufModel,
  deeplink: (model, filepath) => new URL(`lmstudio://open_from_hf?model=${model.id}${filepath ? `&file=${filepath}` : ""}`)
  },
+ localai: {
+ prettyLabel: "LocalAI",
+ docsUrl: "https://github.com/mudler/LocalAI",
+ mainTask: "text-generation",
+ displayOnModelPage: isGgufModel,
+ snippet: snippetLocalAI
+ },
  jan: {
  prettyLabel: "Jan",
  docsUrl: "https://jan.ai",
@@ -6692,6 +6756,21 @@ var LOCAL_APPS = {
  macOSOnly: true,
  displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
  deeplink: (model) => new URL(`diffusionbee://open_from_hf?model=${model.id}`)
+ },
+ joyfusion: {
+ prettyLabel: "JoyFusion",
+ docsUrl: "https://joyfusion.app",
+ mainTask: "text-to-image",
+ macOSOnly: true,
+ displayOnModelPage: (model) => model.tags.includes("coreml") && model.pipeline_tag === "text-to-image",
+ deeplink: (model) => new URL(`https://joyfusion.app/import_from_hf?repo_id=${model.id}`)
+ },
+ invoke: {
+ prettyLabel: "Invoke",
+ docsUrl: "https://github.com/invoke-ai/InvokeAI",
+ mainTask: "text-to-image",
+ displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
+ deeplink: (model) => new URL(`https://models.invoke.ai/huggingface/${model.id}`)
  }
  };
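Unlike LocalAI, the two new entries above are deeplink-only: instead of producing snippets they build a URL that hands the repo over to the app, gated by `displayOnModelPage`. A minimal sketch of the URLs this yields, under the same package-root import assumption and with a made-up model id:

import { LOCAL_APPS, type ModelData } from "@huggingface/tasks";

// Illustrative stub for a diffusers text-to-image model that also carries a Core ML export tag.
const model = {
  id: "some-org/some-diffusion-model",
  library_name: "diffusers",
  pipeline_tag: "text-to-image",
  tags: ["coreml"],
} as unknown as ModelData;

if (LOCAL_APPS.invoke.displayOnModelPage(model)) {
  console.log(LOCAL_APPS.invoke.deeplink(model).href);
  // https://models.invoke.ai/huggingface/some-org/some-diffusion-model
}
if (LOCAL_APPS.joyfusion.displayOnModelPage(model)) {
  console.log(LOCAL_APPS.joyfusion.deeplink(model).href);
  // https://joyfusion.app/import_from_hf?repo_id=some-org/some-diffusion-model
}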
 
package/dist/index.js CHANGED
@@ -4746,6 +4746,33 @@ var fastai = (model) => [
 
  learn = from_pretrained_fastai("${model.id}")`
  ];
+ var sam2 = (model) => {
+ const image_predictor = `# Use SAM2 with images
+ import torch
+ from sam2.sam2_image_predictor import SAM2ImagePredictor
+
+ predictor = SAM2ImagePredictor.from_pretrained(${model.id})
+
+ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
+ predictor.set_image(<your_image>)
+ masks, _, _ = predictor.predict(<input_prompts>)`;
+ const video_predictor = `# Use SAM2 with videos
+ import torch
+ from sam2.sam2_video_predictor import SAM2VideoPredictor
+
+ predictor = SAM2VideoPredictor.from_pretrained(${model.id})
+
+ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
+ state = predictor.init_state(<your_video>)
+
+ # add new prompts and instantly get the output on the same frame
+ frame_idx, object_ids, masks = predictor.add_new_points(state, <your_prompts>):
+
+ # propagate the prompts to get masklets throughout the video
+ for frame_idx, object_ids, masks in predictor.propagate_in_video(state):
+ ...`;
+ return [image_predictor, video_predictor];
+ };
  var sampleFactory = (model) => [
  `python -m sample_factory.huggingface.load_from_hub -r ${model.id} -d ./train_dir`
  ];
@@ -5412,6 +5439,14 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  filter: false,
  countDownloads: `path:"tokenizer.model"`
  },
+ refiners: {
+ prettyLabel: "Refiners",
+ repoName: "Refiners",
+ repoUrl: "https://github.com/finegrain-ai/refiners",
+ docsUrl: "https://refine.rs/",
+ filter: false,
+ countDownloads: `path:"model.safetensors"`
+ },
  saelens: {
  prettyLabel: "SAELens",
  repoName: "SAELens",
@@ -5424,6 +5459,7 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  repoName: "sam2",
  repoUrl: "https://github.com/facebookresearch/segment-anything-2",
  filter: false,
+ snippets: sam2,
  countDownloads: `path_extension:"pt"`
  },
  "sample-factory": {
@@ -6567,6 +6603,27 @@ var snippetLlamacpp = (model, filepath) => {
  }
  ];
  };
+ var snippetLocalAI = (model, filepath) => {
+ const command = (binary) => ["# Load and run the model:", `${binary} huggingface://${model.id}/${filepath ?? "{{GGUF_FILE}}"}`].join("\n");
+ return [
+ {
+ title: "Install from binary",
+ setup: "curl https://localai.io/install.sh | sh",
+ content: command("local-ai run")
+ },
+ {
+ title: "Use Docker images",
+ setup: [
+ // prettier-ignore
+ "# Pull the image:",
+ "docker pull localai/localai:latest-cpu"
+ ].join("\n"),
+ content: command(
+ "docker run -p 8080:8080 --name localai -v $PWD/models:/build/models localai/localai:latest-cpu"
+ )
+ }
+ ];
+ };
  var LOCAL_APPS = {
  "llama.cpp": {
  prettyLabel: "llama.cpp",
@@ -6582,6 +6639,13 @@ var LOCAL_APPS = {
  displayOnModelPage: isGgufModel,
  deeplink: (model, filepath) => new URL(`lmstudio://open_from_hf?model=${model.id}${filepath ? `&file=${filepath}` : ""}`)
  },
+ localai: {
+ prettyLabel: "LocalAI",
+ docsUrl: "https://github.com/mudler/LocalAI",
+ mainTask: "text-generation",
+ displayOnModelPage: isGgufModel,
+ snippet: snippetLocalAI
+ },
  jan: {
  prettyLabel: "Jan",
  docsUrl: "https://jan.ai",
@@ -6654,6 +6718,21 @@ var LOCAL_APPS = {
  macOSOnly: true,
  displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
  deeplink: (model) => new URL(`diffusionbee://open_from_hf?model=${model.id}`)
+ },
+ joyfusion: {
+ prettyLabel: "JoyFusion",
+ docsUrl: "https://joyfusion.app",
+ mainTask: "text-to-image",
+ macOSOnly: true,
+ displayOnModelPage: (model) => model.tags.includes("coreml") && model.pipeline_tag === "text-to-image",
+ deeplink: (model) => new URL(`https://joyfusion.app/import_from_hf?repo_id=${model.id}`)
+ },
+ invoke: {
+ prettyLabel: "Invoke",
+ docsUrl: "https://github.com/invoke-ai/InvokeAI",
+ mainTask: "text-to-image",
+ displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
+ deeplink: (model) => new URL(`https://models.invoke.ai/huggingface/${model.id}`)
  }
  };
 
package/dist/local-apps.d.ts CHANGED
@@ -78,6 +78,13 @@ export declare const LOCAL_APPS: {
  displayOnModelPage: typeof isGgufModel;
  deeplink: (model: ModelData, filepath: string | undefined) => URL;
  };
+ localai: {
+ prettyLabel: string;
+ docsUrl: string;
+ mainTask: "text-generation";
+ displayOnModelPage: typeof isGgufModel;
+ snippet: (model: ModelData, filepath?: string) => LocalAppSnippet[];
+ };
  jan: {
  prettyLabel: string;
  docsUrl: string;
@@ -137,6 +144,21 @@ export declare const LOCAL_APPS: {
  displayOnModelPage: (model: ModelData) => boolean;
  deeplink: (model: ModelData) => URL;
  };
+ joyfusion: {
+ prettyLabel: string;
+ docsUrl: string;
+ mainTask: "text-to-image";
+ macOSOnly: true;
+ displayOnModelPage: (model: ModelData) => boolean;
+ deeplink: (model: ModelData) => URL;
+ };
+ invoke: {
+ prettyLabel: string;
+ docsUrl: string;
+ mainTask: "text-to-image";
+ displayOnModelPage: (model: ModelData) => boolean;
+ deeplink: (model: ModelData) => URL;
+ };
  };
  export type LocalAppKey = keyof typeof LOCAL_APPS;
  export {};
package/dist/local-apps.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD,MAAM,WAAW,eAAe;IAC/B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,GAAG,CAAC;CACtD,GACD;IACA;;;OAGG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,MAAM,GAAG,MAAM,EAAE,GAAG,eAAe,GAAG,eAAe,EAAE,CAAC;CACzG,CACH,CAAC;AAEF,iBAAS,WAAW,CAAC,KAAK,EAAE,SAAS,WAEpC;AAuCD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;yBAhDS,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8I5C,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,MAAM,OAAO,UAAU,CAAC"}
+ {"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD,MAAM,WAAW,eAAe;IAC/B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,GAAG,CAAC;CACtD,GACD;IACA;;;OAGG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,MAAM,GAAG,MAAM,EAAE,GAAG,eAAe,GAAG,eAAe,EAAE,CAAC;CACzG,CACH,CAAC;AAEF,iBAAS,WAAW,CAAC,KAAK,EAAE,SAAS,WAEpC;AA8DD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;yBAvES,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;;;;;;;;;yBAqCjD,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAsJ3C,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,MAAM,OAAO,UAAU,CAAC"}
package/dist/model-libraries-snippets.d.ts CHANGED
@@ -31,6 +31,7 @@ export declare const saelens: () => string[];
  export declare const sklearn: (model: ModelData) => string[];
  export declare const stable_audio_tools: (model: ModelData) => string[];
  export declare const fastai: (model: ModelData) => string[];
+ export declare const sam2: (model: ModelData) => string[];
  export declare const sampleFactory: (model: ModelData) => string[];
  export declare const sentenceTransformers: (model: ModelData) => string[];
  export declare const setfit: (model: ModelData) => string[];
package/dist/model-libraries-snippets.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAY9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAMF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,iBAAiB,UAAW,SAAS,KAAG,MAAM,EA6C1D,CAAC;AA+BF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAS9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AAEF,eAAO,MAAM,oBAAoB,UAAW,SAAS,KAAG,MAAM,EAI7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EA4CrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAYjD,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC"}
+ {"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAY9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAMF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,iBAAiB,UAAW,SAAS,KAAG,MAAM,EA6C1D,CAAC;AA+BF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAS9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EA2B7C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AAEF,eAAO,MAAM,oBAAoB,UAAW,SAAS,KAAG,MAAM,EAI7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EA4CrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAYjD,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC"}
package/dist/model-libraries.d.ts CHANGED
@@ -429,6 +429,14 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
  filter: false;
  countDownloads: string;
  };
+ refiners: {
+ prettyLabel: string;
+ repoName: string;
+ repoUrl: string;
+ docsUrl: string;
+ filter: false;
+ countDownloads: string;
+ };
  saelens: {
  prettyLabel: string;
  repoName: string;
@@ -441,6 +449,7 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
  repoName: string;
  repoUrl: string;
  filter: false;
+ snippets: (model: ModelData) => string[];
  countDownloads: string;
  };
  "sample-factory": {
@@ -605,6 +614,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
  };
  };
  export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
- export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
- export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
+ export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
+ export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
  //# sourceMappingURL=model-libraries.d.ts.map
package/dist/model-libraries.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAwiBI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,o/BAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,o/BAQ1B,CAAC"}
+ {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAijBI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,igCAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,igCAQ1B,CAAC"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@huggingface/tasks",
  "packageManager": "pnpm@8.10.5",
- "version": "0.11.8",
+ "version": "0.11.9",
  "description": "List of ML tasks for huggingface.co/tasks",
  "repository": "https://github.com/huggingface/huggingface.js.git",
  "publishConfig": {
package/src/local-apps.ts CHANGED
@@ -99,6 +99,29 @@ const snippetLlamacpp = (model: ModelData, filepath?: string): LocalAppSnippet[]
  ];
  };
 
+ const snippetLocalAI = (model: ModelData, filepath?: string): LocalAppSnippet[] => {
+ const command = (binary: string) =>
+ ["# Load and run the model:", `${binary} huggingface://${model.id}/${filepath ?? "{{GGUF_FILE}}"}`].join("\n");
+ return [
+ {
+ title: "Install from binary",
+ setup: "curl https://localai.io/install.sh | sh",
+ content: command("local-ai run"),
+ },
+ {
+ title: "Use Docker images",
+ setup: [
+ // prettier-ignore
+ "# Pull the image:",
+ "docker pull localai/localai:latest-cpu",
+ ].join("\n"),
+ content: command(
+ "docker run -p 8080:8080 --name localai -v $PWD/models:/build/models localai/localai:latest-cpu"
+ ),
+ },
+ ];
+ };
+
  /**
  * Add your new local app here.
  *
@@ -126,6 +149,13 @@ export const LOCAL_APPS = {
  deeplink: (model, filepath) =>
  new URL(`lmstudio://open_from_hf?model=${model.id}${filepath ? `&file=${filepath}` : ""}`),
  },
+ localai: {
+ prettyLabel: "LocalAI",
+ docsUrl: "https://github.com/mudler/LocalAI",
+ mainTask: "text-generation",
+ displayOnModelPage: isGgufModel,
+ snippet: snippetLocalAI,
+ },
  jan: {
  prettyLabel: "Jan",
  docsUrl: "https://jan.ai",
@@ -204,6 +234,21 @@ export const LOCAL_APPS = {
  displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
  deeplink: (model) => new URL(`diffusionbee://open_from_hf?model=${model.id}`),
  },
+ joyfusion: {
+ prettyLabel: "JoyFusion",
+ docsUrl: "https://joyfusion.app",
+ mainTask: "text-to-image",
+ macOSOnly: true,
+ displayOnModelPage: (model) => model.tags.includes("coreml") && model.pipeline_tag === "text-to-image",
+ deeplink: (model) => new URL(`https://joyfusion.app/import_from_hf?repo_id=${model.id}`),
+ },
+ invoke: {
+ prettyLabel: "Invoke",
+ docsUrl: "https://github.com/invoke-ai/InvokeAI",
+ mainTask: "text-to-image",
+ displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
+ deeplink: (model) => new URL(`https://models.invoke.ai/huggingface/${model.id}`),
+ },
  } satisfies Record<string, LocalApp>;
 
  export type LocalAppKey = keyof typeof LOCAL_APPS;
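One detail worth noting in the source above: `filepath` is optional, so when no particular .gguf file has been chosen the command keeps the literal `{{GGUF_FILE}}` placeholder (presumably substituted later by whatever UI renders the snippet; that step is an assumption, not part of this diff). A small sketch of both cases through the public entry, with a made-up repo id and filename:

import { LOCAL_APPS, type ModelData } from "@huggingface/tasks";

const model = { id: "some-org/some-model-GGUF", tags: ["gguf"] } as unknown as ModelData;

const withFile = LOCAL_APPS.localai.snippet(model, "some-model.Q4_K_M.gguf");
// withFile[0].content:
//   # Load and run the model:
//   local-ai run huggingface://some-org/some-model-GGUF/some-model.Q4_K_M.gguf

const withoutFile = LOCAL_APPS.localai.snippet(model);
// withoutFile[0].content:
//   # Load and run the model:
//   local-ai run huggingface://some-org/some-model-GGUF/{{GGUF_FILE}}
console.log(withFile[0]?.content, withoutFile[0]?.content);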
package/src/model-libraries-snippets.ts CHANGED
@@ -525,6 +525,35 @@ export const fastai = (model: ModelData): string[] => [
  learn = from_pretrained_fastai("${model.id}")`,
  ];
 
+ export const sam2 = (model: ModelData): string[] => {
+ const image_predictor = `# Use SAM2 with images
+ import torch
+ from sam2.sam2_image_predictor import SAM2ImagePredictor
+
+ predictor = SAM2ImagePredictor.from_pretrained(${model.id})
+
+ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
+ predictor.set_image(<your_image>)
+ masks, _, _ = predictor.predict(<input_prompts>)`;
+
+ const video_predictor = `# Use SAM2 with videos
+ import torch
+ from sam2.sam2_video_predictor import SAM2VideoPredictor
+
+ predictor = SAM2VideoPredictor.from_pretrained(${model.id})
+
+ with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
+ state = predictor.init_state(<your_video>)
+
+ # add new prompts and instantly get the output on the same frame
+ frame_idx, object_ids, masks = predictor.add_new_points(state, <your_prompts>):
+
+ # propagate the prompts to get masklets throughout the video
+ for frame_idx, object_ids, masks in predictor.propagate_in_video(state):
+ ...`;
+ return [image_predictor, video_predictor];
+ };
+
  export const sampleFactory = (model: ModelData): string[] => [
  `python -m sample_factory.huggingface.load_from_hub -r ${model.id} -d ./train_dir`,
  ];
package/src/model-libraries.ts CHANGED
@@ -435,6 +435,14 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
  filter: false,
  countDownloads: `path:"tokenizer.model"`,
  },
+ refiners: {
+ prettyLabel: "Refiners",
+ repoName: "Refiners",
+ repoUrl: "https://github.com/finegrain-ai/refiners",
+ docsUrl: "https://refine.rs/",
+ filter: false,
+ countDownloads: `path:"model.safetensors"`,
+ },
  saelens: {
  prettyLabel: "SAELens",
  repoName: "SAELens",
@@ -447,6 +455,7 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
  repoName: "sam2",
  repoUrl: "https://github.com/facebookresearch/segment-anything-2",
  filter: false,
+ snippets: snippets.sam2,
  countDownloads: `path_extension:"pt"`,
  },
  "sample-factory": {