@huggingface/tasks 0.11.10 → 0.11.12

This diff shows the changes between publicly available versions of the package as they appear in their public registry, and is provided for informational purposes only.
package/dist/index.cjs CHANGED
@@ -4461,6 +4461,43 @@ var diffusers = (model) => {
  return diffusers_default(model);
  }
  };
+ var cartesia_pytorch = (model) => [
+ `# pip install --no-binary :all: cartesia-pytorch
+ from cartesia_pytorch import ReneLMHeadModel
+ from transformers import AutoTokenizer
+
+ model = ReneLMHeadModel.from_pretrained("${model.id}")
+ tokenizer = AutoTokenizer.from_pretrained("allenai/OLMo-1B-hf")
+
+ in_message = ["Rene Descartes was"]
+ inputs = tokenizer(in_message, return_tensors="pt")
+
+ outputs = model.generate(inputs.input_ids, max_length=50, top_k=100, top_p=0.99)
+ out_message = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]
+
+ print(out_message)
+ )`
+ ];
+ var cartesia_mlx = (model) => [
+ `import mlx.core as mx
+ import cartesia_mlx as cmx
+
+ model = cmx.from_pretrained("${model.id}")
+ model.set_dtype(mx.float32)
+
+ prompt = "Rene Descartes was"
+
+ for text in model.generate(
+ prompt,
+ max_tokens=500,
+ eval_every_n=5,
+ verbose=True,
+ top_p=0.99,
+ temperature=0.85,
+ ):
+ print(text, end="", flush=True)
+ `
+ ];
  var edsnlp = (model) => {
  const packageName = nameWithoutNamespace(model.id).replaceAll("-", "_");
  return [
@@ -4551,12 +4588,12 @@ llm = Llama.from_pretrained(
  )
 
  llm.create_chat_completion(
- messages = [
- {
- "role": "user",
- "content": "What is the capital of France?"
- }
- ]
+ messages = [
+ {
+ "role": "user",
+ "content": "What is the capital of France?"
+ }
+ ]
  )`
  ];
  var tf_keras = (model) => [
@@ -4699,6 +4736,20 @@ sae, cfg_dict, sparsity = SAE.from_pretrained(
  sae_id = "SAE_ID", # e.g., "blocks.8.hook_resid_pre". Won't always be a hook point
  )`
  ];
+ var seed_story = () => [
+ `# seed_story_cfg_path refers to 'https://github.com/TencentARC/SEED-Story/blob/master/configs/clm_models/agent_7b_sft.yaml'
+ # llm_cfg_path refers to 'https://github.com/TencentARC/SEED-Story/blob/master/configs/clm_models/llama2chat7b_lora.yaml'
+ from omegaconf import OmegaConf
+ import hydra
+
+ # load Llama2
+ llm_cfg = OmegaConf.load(llm_cfg_path)
+ llm = hydra.utils.instantiate(llm_cfg, torch_dtype="fp16")
+
+ # initialize seed_story
+ seed_story_cfg = OmegaConf.load(seed_story_cfg_path)
+ seed_story = hydra.utils.instantiate(seed_story_cfg, llm=llm) `
+ ];
  var skopsPickle = (model, modelFile) => {
  return [
  `import joblib
@@ -5197,6 +5248,13 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  filter: false,
  countDownloads: `path:"adapter_config.json"`
  },
+ deepforest: {
+ prettyLabel: "DeepForest",
+ repoName: "deepforest",
+ docsUrl: "https://deepforest.readthedocs.io/en/latest/",
+ repoUrl: "https://github.com/weecology/DeepForest",
+ countDownloads: `path_extension:"pt" OR path_extension:"pl"`
+ },
  "depth-anything-v2": {
  prettyLabel: "DepthAnythingV2",
  repoName: "Depth Anything V2",
@@ -5226,6 +5284,18 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  repoName: "doctr",
  repoUrl: "https://github.com/mindee/doctr"
  },
+ cartesia_pytorch: {
+ prettyLabel: "Cartesia Pytorch",
+ repoName: "Cartesia Pytorch",
+ repoUrl: "https://github.com/cartesia-ai/cartesia_pytorch",
+ snippets: cartesia_pytorch
+ },
+ cartesia_mlx: {
+ prettyLabel: "Cartesia MLX",
+ repoName: "Cartesia MLX",
+ repoUrl: "https://github.com/cartesia-ai/cartesia_mlx",
+ snippets: cartesia_mlx
+ },
  edsnlp: {
  prettyLabel: "EDS-NLP",
  repoName: "edsnlp",
@@ -5586,6 +5656,14 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  filter: false,
  countDownloads: `path_extension:"safetensors"`
  },
+ "seed-story": {
+ prettyLabel: "SEED-Story",
+ repoName: "SEED-Story",
+ repoUrl: "https://github.com/TencentARC/SEED-Story",
+ filter: false,
+ countDownloads: `path:"cvlm_llama2_tokenizer/tokenizer.model"`,
+ snippets: seed_story
+ },
  "stable-baselines3": {
  prettyLabel: "stable-baselines3",
  repoName: "stable-baselines3",
@@ -5814,8 +5892,7 @@ var snippetBasic = (model, accessToken) => `curl https://api-inference.huggingfa
  -X POST \\
  -d '{"inputs": ${getModelInputSnippet(model, true)}}' \\
  -H 'Content-Type: application/json' \\
- -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"
- `;
+ -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"`;
  var snippetTextGeneration = (model, accessToken) => {
  if (model.config?.tokenizer_config?.chat_template) {
  return `curl 'https://api-inference.huggingface.co/models/${model.id}/v1/chat/completions' \\
@@ -5836,13 +5913,11 @@ var snippetZeroShotClassification = (model, accessToken) => `curl https://api-in
  -X POST \\
  -d '{"inputs": ${getModelInputSnippet(model, true)}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}}' \\
  -H 'Content-Type: application/json' \\
- -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"
- `;
+ -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"`;
  var snippetFile = (model, accessToken) => `curl https://api-inference.huggingface.co/models/${model.id} \\
  -X POST \\
  --data-binary '@${getModelInputSnippet(model, true, true)}' \\
- -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"
- `;
+ -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"`;
  var curlSnippets = {
  // Same order as in js/src/lib/interfaces/Types.ts
  "text-classification": snippetBasic,
@@ -5903,8 +5978,7 @@ for message in client.chat_completion(
  max_tokens=500,
  stream=True,
  ):
- print(message.choices[0].delta.content, end="")
- `;
+ print(message.choices[0].delta.content, end="")`;
  var snippetZeroShotClassification2 = (model) => `def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()
@@ -6082,8 +6156,7 @@ for await (const chunk of inference.chatCompletionStream({
  max_tokens: 500,
  })) {
  process.stdout.write(chunk.choices[0]?.delta?.content || "");
- }
- `;
+ }`;
  } else {
  return snippetBasic3(model, accessToken);
  }
@@ -6348,6 +6421,14 @@ var SKUS = {
  tflops: 12.74,
  memory: [12, 8]
  },
+ "RTX 2080 Ti": {
+ tflops: 26.9,
+ memory: [11]
+ },
+ "RTX 2080": {
+ tflops: 20.14,
+ memory: [8]
+ },
  "RTX 2070": {
  tflops: 14.93,
  memory: [8]
@@ -6615,8 +6696,8 @@ var SKUS = {
  };
 
  // src/local-apps.ts
- function isGgufModel(model) {
- return model.tags.includes("gguf");
+ function isLlamaCppGgufModel(model) {
+ return !!model.gguf?.context_length;
  }
  var snippetLlamacpp = (model, filepath) => {
  const command = (binary) => [
@@ -6679,51 +6760,51 @@ var LOCAL_APPS = {
  prettyLabel: "llama.cpp",
  docsUrl: "https://github.com/ggerganov/llama.cpp",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  snippet: snippetLlamacpp
  },
  lmstudio: {
  prettyLabel: "LM Studio",
  docsUrl: "https://lmstudio.ai",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  deeplink: (model, filepath) => new URL(`lmstudio://open_from_hf?model=${model.id}${filepath ? `&file=${filepath}` : ""}`)
  },
  localai: {
  prettyLabel: "LocalAI",
  docsUrl: "https://github.com/mudler/LocalAI",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  snippet: snippetLocalAI
  },
  jan: {
  prettyLabel: "Jan",
  docsUrl: "https://jan.ai",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  deeplink: (model) => new URL(`jan://models/huggingface/${model.id}`)
  },
  backyard: {
  prettyLabel: "Backyard AI",
  docsUrl: "https://backyard.ai",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  deeplink: (model) => new URL(`https://backyard.ai/hf/model/${model.id}`)
  },
  sanctum: {
  prettyLabel: "Sanctum",
  docsUrl: "https://sanctum.ai",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  deeplink: (model) => new URL(`sanctum://open_from_hf?model=${model.id}`)
  },
  jellybox: {
  prettyLabel: "Jellybox",
  docsUrl: "https://jellybox.com",
  mainTask: "text-generation",
- displayOnModelPage: (model) => isGgufModel(model) || model.library_name === "diffusers" && model.tags.includes("safetensors") && (model.pipeline_tag === "text-to-image" || model.tags.includes("lora")),
+ displayOnModelPage: (model) => isLlamaCppGgufModel(model) || model.library_name === "diffusers" && model.tags.includes("safetensors") && (model.pipeline_tag === "text-to-image" || model.tags.includes("lora")),
  deeplink: (model) => {
- if (isGgufModel(model)) {
+ if (isLlamaCppGgufModel(model)) {
  return new URL(`jellybox://llm/models/huggingface/LLM/${model.id}`);
  } else if (model.tags.includes("lora")) {
  return new URL(`jellybox://image/models/huggingface/ImageLora/${model.id}`);
@@ -6736,7 +6817,7 @@ var LOCAL_APPS = {
  prettyLabel: "Msty",
  docsUrl: "https://msty.app",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  deeplink: (model) => new URL(`msty://models/search/hf/${model.id}`)
  },
  recursechat: {
@@ -6744,7 +6825,7 @@ var LOCAL_APPS = {
  docsUrl: "https://recurse.chat",
  mainTask: "text-generation",
  macOSOnly: true,
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  deeplink: (model) => new URL(`recursechat://new-hf-gguf-model?hf-model-id=${model.id}`)
  },
  drawthings: {
@@ -6767,7 +6848,7 @@ var LOCAL_APPS = {
  mainTask: "text-to-image",
  macOSOnly: true,
  displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
- deeplink: (model) => new URL(`diffusionbee://open_from_hf?model=${model.id}`)
+ deeplink: (model) => new URL(`https://diffusionbee.com/huggingface_import?model_id=${model.id}`)
  },
  joyfusion: {
  prettyLabel: "JoyFusion",
@@ -6841,6 +6922,12 @@ var DATASET_LIBRARIES_UI_ELEMENTS = {
  repoName: "polars",
  repoUrl: "https://github.com/pola-rs/polars",
  docsUrl: "https://huggingface.co/docs/hub/datasets-polars"
+ },
+ duckdb: {
+ prettyLabel: "DuckDB",
+ repoName: "duckdb",
+ repoUrl: "https://github.com/duckdb/duckdb",
+ docsUrl: "https://huggingface.co/docs/hub/datasets-duckdb"
  }
  };
  // Annotate the CommonJS export names for ESM import in node:
package/dist/index.js CHANGED
@@ -4423,6 +4423,43 @@ var diffusers = (model) => {
  return diffusers_default(model);
  }
  };
+ var cartesia_pytorch = (model) => [
+ `# pip install --no-binary :all: cartesia-pytorch
+ from cartesia_pytorch import ReneLMHeadModel
+ from transformers import AutoTokenizer
+
+ model = ReneLMHeadModel.from_pretrained("${model.id}")
+ tokenizer = AutoTokenizer.from_pretrained("allenai/OLMo-1B-hf")
+
+ in_message = ["Rene Descartes was"]
+ inputs = tokenizer(in_message, return_tensors="pt")
+
+ outputs = model.generate(inputs.input_ids, max_length=50, top_k=100, top_p=0.99)
+ out_message = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]
+
+ print(out_message)
+ )`
+ ];
+ var cartesia_mlx = (model) => [
+ `import mlx.core as mx
+ import cartesia_mlx as cmx
+
+ model = cmx.from_pretrained("${model.id}")
+ model.set_dtype(mx.float32)
+
+ prompt = "Rene Descartes was"
+
+ for text in model.generate(
+ prompt,
+ max_tokens=500,
+ eval_every_n=5,
+ verbose=True,
+ top_p=0.99,
+ temperature=0.85,
+ ):
+ print(text, end="", flush=True)
+ `
+ ];
  var edsnlp = (model) => {
  const packageName = nameWithoutNamespace(model.id).replaceAll("-", "_");
  return [
@@ -4513,12 +4550,12 @@ llm = Llama.from_pretrained(
  )
 
  llm.create_chat_completion(
- messages = [
- {
- "role": "user",
- "content": "What is the capital of France?"
- }
- ]
+ messages = [
+ {
+ "role": "user",
+ "content": "What is the capital of France?"
+ }
+ ]
  )`
  ];
  var tf_keras = (model) => [
@@ -4661,6 +4698,20 @@ sae, cfg_dict, sparsity = SAE.from_pretrained(
  sae_id = "SAE_ID", # e.g., "blocks.8.hook_resid_pre". Won't always be a hook point
  )`
  ];
+ var seed_story = () => [
+ `# seed_story_cfg_path refers to 'https://github.com/TencentARC/SEED-Story/blob/master/configs/clm_models/agent_7b_sft.yaml'
+ # llm_cfg_path refers to 'https://github.com/TencentARC/SEED-Story/blob/master/configs/clm_models/llama2chat7b_lora.yaml'
+ from omegaconf import OmegaConf
+ import hydra
+
+ # load Llama2
+ llm_cfg = OmegaConf.load(llm_cfg_path)
+ llm = hydra.utils.instantiate(llm_cfg, torch_dtype="fp16")
+
+ # initialize seed_story
+ seed_story_cfg = OmegaConf.load(seed_story_cfg_path)
+ seed_story = hydra.utils.instantiate(seed_story_cfg, llm=llm) `
+ ];
  var skopsPickle = (model, modelFile) => {
  return [
  `import joblib
@@ -5159,6 +5210,13 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  filter: false,
  countDownloads: `path:"adapter_config.json"`
  },
+ deepforest: {
+ prettyLabel: "DeepForest",
+ repoName: "deepforest",
+ docsUrl: "https://deepforest.readthedocs.io/en/latest/",
+ repoUrl: "https://github.com/weecology/DeepForest",
+ countDownloads: `path_extension:"pt" OR path_extension:"pl"`
+ },
  "depth-anything-v2": {
  prettyLabel: "DepthAnythingV2",
  repoName: "Depth Anything V2",
@@ -5188,6 +5246,18 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  repoName: "doctr",
  repoUrl: "https://github.com/mindee/doctr"
  },
+ cartesia_pytorch: {
+ prettyLabel: "Cartesia Pytorch",
+ repoName: "Cartesia Pytorch",
+ repoUrl: "https://github.com/cartesia-ai/cartesia_pytorch",
+ snippets: cartesia_pytorch
+ },
+ cartesia_mlx: {
+ prettyLabel: "Cartesia MLX",
+ repoName: "Cartesia MLX",
+ repoUrl: "https://github.com/cartesia-ai/cartesia_mlx",
+ snippets: cartesia_mlx
+ },
  edsnlp: {
  prettyLabel: "EDS-NLP",
  repoName: "edsnlp",
@@ -5548,6 +5618,14 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  filter: false,
  countDownloads: `path_extension:"safetensors"`
  },
+ "seed-story": {
+ prettyLabel: "SEED-Story",
+ repoName: "SEED-Story",
+ repoUrl: "https://github.com/TencentARC/SEED-Story",
+ filter: false,
+ countDownloads: `path:"cvlm_llama2_tokenizer/tokenizer.model"`,
+ snippets: seed_story
+ },
  "stable-baselines3": {
  prettyLabel: "stable-baselines3",
  repoName: "stable-baselines3",
@@ -5776,8 +5854,7 @@ var snippetBasic = (model, accessToken) => `curl https://api-inference.huggingfa
  -X POST \\
  -d '{"inputs": ${getModelInputSnippet(model, true)}}' \\
  -H 'Content-Type: application/json' \\
- -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"
- `;
+ -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"`;
  var snippetTextGeneration = (model, accessToken) => {
  if (model.config?.tokenizer_config?.chat_template) {
  return `curl 'https://api-inference.huggingface.co/models/${model.id}/v1/chat/completions' \\
@@ -5798,13 +5875,11 @@ var snippetZeroShotClassification = (model, accessToken) => `curl https://api-in
  -X POST \\
  -d '{"inputs": ${getModelInputSnippet(model, true)}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}}' \\
  -H 'Content-Type: application/json' \\
- -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"
- `;
+ -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"`;
  var snippetFile = (model, accessToken) => `curl https://api-inference.huggingface.co/models/${model.id} \\
  -X POST \\
  --data-binary '@${getModelInputSnippet(model, true, true)}' \\
- -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"
- `;
+ -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"`;
  var curlSnippets = {
  // Same order as in js/src/lib/interfaces/Types.ts
  "text-classification": snippetBasic,
@@ -5865,8 +5940,7 @@ for message in client.chat_completion(
  max_tokens=500,
  stream=True,
  ):
- print(message.choices[0].delta.content, end="")
- `;
+ print(message.choices[0].delta.content, end="")`;
  var snippetZeroShotClassification2 = (model) => `def query(payload):
  response = requests.post(API_URL, headers=headers, json=payload)
  return response.json()
@@ -6044,8 +6118,7 @@ for await (const chunk of inference.chatCompletionStream({
  max_tokens: 500,
  })) {
  process.stdout.write(chunk.choices[0]?.delta?.content || "");
- }
- `;
+ }`;
  } else {
  return snippetBasic3(model, accessToken);
  }
@@ -6310,6 +6383,14 @@ var SKUS = {
  tflops: 12.74,
  memory: [12, 8]
  },
+ "RTX 2080 Ti": {
+ tflops: 26.9,
+ memory: [11]
+ },
+ "RTX 2080": {
+ tflops: 20.14,
+ memory: [8]
+ },
  "RTX 2070": {
  tflops: 14.93,
  memory: [8]
@@ -6577,8 +6658,8 @@ var SKUS = {
  };
 
  // src/local-apps.ts
- function isGgufModel(model) {
- return model.tags.includes("gguf");
+ function isLlamaCppGgufModel(model) {
+ return !!model.gguf?.context_length;
  }
  var snippetLlamacpp = (model, filepath) => {
  const command = (binary) => [
@@ -6641,51 +6722,51 @@ var LOCAL_APPS = {
  prettyLabel: "llama.cpp",
  docsUrl: "https://github.com/ggerganov/llama.cpp",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  snippet: snippetLlamacpp
  },
  lmstudio: {
  prettyLabel: "LM Studio",
  docsUrl: "https://lmstudio.ai",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  deeplink: (model, filepath) => new URL(`lmstudio://open_from_hf?model=${model.id}${filepath ? `&file=${filepath}` : ""}`)
  },
  localai: {
  prettyLabel: "LocalAI",
  docsUrl: "https://github.com/mudler/LocalAI",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  snippet: snippetLocalAI
  },
  jan: {
  prettyLabel: "Jan",
  docsUrl: "https://jan.ai",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  deeplink: (model) => new URL(`jan://models/huggingface/${model.id}`)
  },
  backyard: {
  prettyLabel: "Backyard AI",
  docsUrl: "https://backyard.ai",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  deeplink: (model) => new URL(`https://backyard.ai/hf/model/${model.id}`)
  },
  sanctum: {
  prettyLabel: "Sanctum",
  docsUrl: "https://sanctum.ai",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  deeplink: (model) => new URL(`sanctum://open_from_hf?model=${model.id}`)
  },
  jellybox: {
  prettyLabel: "Jellybox",
  docsUrl: "https://jellybox.com",
  mainTask: "text-generation",
- displayOnModelPage: (model) => isGgufModel(model) || model.library_name === "diffusers" && model.tags.includes("safetensors") && (model.pipeline_tag === "text-to-image" || model.tags.includes("lora")),
+ displayOnModelPage: (model) => isLlamaCppGgufModel(model) || model.library_name === "diffusers" && model.tags.includes("safetensors") && (model.pipeline_tag === "text-to-image" || model.tags.includes("lora")),
  deeplink: (model) => {
- if (isGgufModel(model)) {
+ if (isLlamaCppGgufModel(model)) {
  return new URL(`jellybox://llm/models/huggingface/LLM/${model.id}`);
  } else if (model.tags.includes("lora")) {
  return new URL(`jellybox://image/models/huggingface/ImageLora/${model.id}`);
@@ -6698,7 +6779,7 @@ var LOCAL_APPS = {
  prettyLabel: "Msty",
  docsUrl: "https://msty.app",
  mainTask: "text-generation",
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  deeplink: (model) => new URL(`msty://models/search/hf/${model.id}`)
  },
  recursechat: {
@@ -6706,7 +6787,7 @@ var LOCAL_APPS = {
  docsUrl: "https://recurse.chat",
  mainTask: "text-generation",
  macOSOnly: true,
- displayOnModelPage: isGgufModel,
+ displayOnModelPage: isLlamaCppGgufModel,
  deeplink: (model) => new URL(`recursechat://new-hf-gguf-model?hf-model-id=${model.id}`)
  },
  drawthings: {
@@ -6729,7 +6810,7 @@ var LOCAL_APPS = {
  mainTask: "text-to-image",
  macOSOnly: true,
  displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
- deeplink: (model) => new URL(`diffusionbee://open_from_hf?model=${model.id}`)
+ deeplink: (model) => new URL(`https://diffusionbee.com/huggingface_import?model_id=${model.id}`)
  },
  joyfusion: {
  prettyLabel: "JoyFusion",
@@ -6803,6 +6884,12 @@ var DATASET_LIBRARIES_UI_ELEMENTS = {
  repoName: "polars",
  repoUrl: "https://github.com/pola-rs/polars",
  docsUrl: "https://huggingface.co/docs/hub/datasets-polars"
+ },
+ duckdb: {
+ prettyLabel: "DuckDB",
+ repoName: "duckdb",
+ repoUrl: "https://github.com/duckdb/duckdb",
+ docsUrl: "https://huggingface.co/docs/hub/datasets-duckdb"
  }
  };
  export {
@@ -76,6 +76,12 @@ export declare const DATASET_LIBRARIES_UI_ELEMENTS: {
  repoUrl: string;
  docsUrl: string;
  };
+ duckdb: {
+ prettyLabel: string;
+ repoName: string;
+ repoUrl: string;
+ docsUrl: string;
+ };
  };
  export type DatasetLibraryKey = keyof typeof DATASET_LIBRARIES_UI_ELEMENTS;
  //# sourceMappingURL=dataset-libraries.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"dataset-libraries.d.ts","sourceRoot":"","sources":["../../src/dataset-libraries.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACvC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,eAAO,MAAM,6BAA6B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAuDS,CAAC;AAGpD,MAAM,MAAM,iBAAiB,GAAG,MAAM,OAAO,6BAA6B,CAAC"}
+ {"version":3,"file":"dataset-libraries.d.ts","sourceRoot":"","sources":["../../src/dataset-libraries.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACvC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;CACjB;AAED,eAAO,MAAM,6BAA6B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA6DS,CAAC;AAGpD,MAAM,MAAM,iBAAiB,GAAG,MAAM,OAAO,6BAA6B,CAAC"}
@@ -160,6 +160,14 @@ export declare const SKUS: {
  tflops: number;
  memory: number[];
  };
+ "RTX 2080 Ti": {
+ tflops: number;
+ memory: number[];
+ };
+ "RTX 2080": {
+ tflops: number;
+ memory: number[];
+ };
  "RTX 2070": {
  tflops: number;
  memory: number[];
@@ -1 +1 @@
- {"version":3,"file":"hardware.d.ts","sourceRoot":"","sources":["../../src/hardware.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,eAAO,MAAM,iDAAiD,QAAW,CAAC;AAC1E,eAAO,MAAM,yDAAyD,QAAW,CAAC;AAClF,eAAO,MAAM,oCAAoC,QAAU,CAAC;AAE5D;;;GAGG;AACH,eAAO,MAAM,+CAA+C,QAAW,CAAC;AAExE,MAAM,WAAW,YAAY;IAC5B;;;;;;;;;OASG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;CAClB;AAED,eAAO,MAAM,sBAAsB,UAAqD,CAAC;AAEzF,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAuYuD,CAAC;AAEzE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,IAAI,CAAC"}
+ {"version":3,"file":"hardware.d.ts","sourceRoot":"","sources":["../../src/hardware.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,eAAO,MAAM,iDAAiD,QAAW,CAAC;AAC1E,eAAO,MAAM,yDAAyD,QAAW,CAAC;AAClF,eAAO,MAAM,oCAAoC,QAAU,CAAC;AAE5D;;;GAGG;AACH,eAAO,MAAM,+CAA+C,QAAW,CAAC;AAExE,MAAM,WAAW,YAAY;IAC5B;;;;;;;;;OASG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;CAClB;AAED,eAAO,MAAM,sBAAsB,UAAqD,CAAC;AAEzF,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA+YuD,CAAC;AAEzE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,IAAI,CAAC"}
@@ -51,7 +51,7 @@ export type LocalApp = {
  */
  snippet: (model: ModelData, filepath?: string) => string | string[] | LocalAppSnippet | LocalAppSnippet[];
  });
- declare function isGgufModel(model: ModelData): boolean;
+ declare function isLlamaCppGgufModel(model: ModelData): boolean;
  /**
  * Add your new local app here.
  *
@@ -68,42 +68,42 @@ export declare const LOCAL_APPS: {
  prettyLabel: string;
  docsUrl: string;
  mainTask: "text-generation";
- displayOnModelPage: typeof isGgufModel;
+ displayOnModelPage: typeof isLlamaCppGgufModel;
  snippet: (model: ModelData, filepath?: string) => LocalAppSnippet[];
  };
  lmstudio: {
  prettyLabel: string;
  docsUrl: string;
  mainTask: "text-generation";
- displayOnModelPage: typeof isGgufModel;
+ displayOnModelPage: typeof isLlamaCppGgufModel;
  deeplink: (model: ModelData, filepath: string | undefined) => URL;
  };
  localai: {
  prettyLabel: string;
  docsUrl: string;
  mainTask: "text-generation";
- displayOnModelPage: typeof isGgufModel;
+ displayOnModelPage: typeof isLlamaCppGgufModel;
  snippet: (model: ModelData, filepath?: string) => LocalAppSnippet[];
  };
  jan: {
  prettyLabel: string;
  docsUrl: string;
  mainTask: "text-generation";
- displayOnModelPage: typeof isGgufModel;
+ displayOnModelPage: typeof isLlamaCppGgufModel;
  deeplink: (model: ModelData) => URL;
  };
  backyard: {
  prettyLabel: string;
  docsUrl: string;
  mainTask: "text-generation";
- displayOnModelPage: typeof isGgufModel;
+ displayOnModelPage: typeof isLlamaCppGgufModel;
  deeplink: (model: ModelData) => URL;
  };
  sanctum: {
  prettyLabel: string;
  docsUrl: string;
  mainTask: "text-generation";
- displayOnModelPage: typeof isGgufModel;
+ displayOnModelPage: typeof isLlamaCppGgufModel;
  deeplink: (model: ModelData) => URL;
  };
  jellybox: {
@@ -117,7 +117,7 @@ export declare const LOCAL_APPS: {
  prettyLabel: string;
  docsUrl: string;
  mainTask: "text-generation";
- displayOnModelPage: typeof isGgufModel;
+ displayOnModelPage: typeof isLlamaCppGgufModel;
  deeplink: (model: ModelData) => URL;
  };
  recursechat: {
@@ -125,7 +125,7 @@ export declare const LOCAL_APPS: {
  docsUrl: string;
  mainTask: "text-generation";
  macOSOnly: true;
- displayOnModelPage: typeof isGgufModel;
+ displayOnModelPage: typeof isLlamaCppGgufModel;
  deeplink: (model: ModelData) => URL;
  };
  drawthings: {
@@ -1 +1 @@
- {"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD,MAAM,WAAW,eAAe;IAC/B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,GAAG,CAAC;CACtD,GACD;IACA;;;OAGG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,MAAM,GAAG,MAAM,EAAE,GAAG,eAAe,GAAG,eAAe,EAAE,CAAC;CACzG,CACH,CAAC;AAEF,iBAAS,WAAW,CAAC,KAAK,EAAE,SAAS,WAEpC;AA8DD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;yBAvES,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;;;;;;;;;yBAqCjD,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAsJ3C,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,MAAM,OAAO,UAAU,CAAC"}
+ {"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD,MAAM,WAAW,eAAe;IAC/B;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,GAAG,CAAC;CACtD,GACD;IACA;;;OAGG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,MAAM,GAAG,MAAM,EAAE,GAAG,eAAe,GAAG,eAAe,EAAE,CAAC;CACzG,CACH,CAAC;AAOF,iBAAS,mBAAmB,CAAC,KAAK,EAAE,SAAS,WAE5C;AA8DD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;yBAvES,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;;;;;;;;;yBAqCjD,SAAS,aAAa,MAAM,KAAG,eAAe,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAuJ3C,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,MAAM,OAAO,UAAU,CAAC"}
@@ -106,6 +106,16 @@ export interface ModelData {
  * Example: transformers, SpeechBrain, Stanza, etc.
  */
  library_name?: string;
+ safetensors?: {
+ parameters: Record<string, number>;
+ total: number;
+ sharded: boolean;
+ };
+ gguf?: {
+ total: number;
+ architecture?: string;
+ context_length?: number;
+ };
  }
  /**
  * transformers-specific info to display in the code sample.
@@ -1 +1 @@
- {"version":3,"file":"model-data.d.ts","sourceRoot":"","sources":["../../src/model-data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAChD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACtD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAExD;;GAEG;AACH,MAAM,WAAW,SAAS;IACzB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IACX;;;OAGG;IACH,SAAS,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB;;OAEG;IACH,MAAM,CAAC,EAAE;QACR,aAAa,CAAC,EAAE,MAAM,EAAE,CAAC;QACzB;;WAEG;QACH,QAAQ,CAAC,EAAE;YACV;;eAEG;YACH,CAAC,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;SACpB,CAAC;QACF,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,mBAAmB,CAAC,EAAE;YACrB,IAAI,CAAC,EAAE,MAAM,CAAC;YACd,YAAY,CAAC,EAAE,OAAO,CAAC;YACvB,YAAY,CAAC,EAAE,OAAO,CAAC;SACvB,CAAC;QACF,gBAAgB,CAAC,EAAE,eAAe,CAAC;QACnC,oBAAoB,CAAC,EAAE;YACtB,UAAU,CAAC,EAAE,MAAM,CAAC;YACpB,WAAW,CAAC,EAAE,MAAM,CAAC;SACrB,CAAC;QACF,SAAS,CAAC,EAAE;YACX,WAAW,CAAC,EAAE,MAAM,CAAC;SACrB,CAAC;QACF,OAAO,CAAC,EAAE;YACT,KAAK,CAAC,EAAE;gBACP,IAAI,CAAC,EAAE,MAAM,CAAC;aACd,CAAC;YACF,YAAY,CAAC,EAAE,MAAM,CAAC;SACtB,CAAC;QACF,WAAW,CAAC,EAAE;YACb,qBAAqB,CAAC,EAAE,MAAM,CAAC;YAC/B,iBAAiB,CAAC,EAAE,MAAM,CAAC;YAC3B,gBAAgB,CAAC,EAAE,MAAM,CAAC;SAC1B,CAAC;QACF,IAAI,CAAC,EAAE;YACN,uBAAuB,CAAC,EAAE,MAAM,CAAC;YACjC,SAAS,CAAC,EAAE,MAAM,CAAC;SACnB,CAAC;KACF,CAAC;IACF;;OAEG;IACH,IAAI,EAAE,MAAM,EAAE,CAAC;IACf;;OAEG;IACH,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;IACpC;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,GAAG,SAAS,CAAC;IACxC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAChC;;;;;OAKG;IACH,UAAU,CAAC,EAAE,aAAa,EAAE,GAAG,SAAS,CAAC;IACzC;;;;;;;;;OASG;IACH,QAAQ,CAAC,EAAE;QACV,SAAS,CAAC,EACP,OAAO,GACP;YACA,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;SACpC,CAAC;QACL,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;KAC/B,CAAC;IACF;;;OAGG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;CACtB;AAED;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB"}
+ {"version":3,"file":"model-data.d.ts","sourceRoot":"","sources":["../../src/model-data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAChD,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACtD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAExD;;GAEG;AACH,MAAM,WAAW,SAAS;IACzB;;OAEG;IACH,EAAE,EAAE,MAAM,CAAC;IACX;;;OAGG;IACH,SAAS,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB;;OAEG;IACH,MAAM,CAAC,EAAE;QACR,aAAa,CAAC,EAAE,MAAM,EAAE,CAAC;QACzB;;WAEG;QACH,QAAQ,CAAC,EAAE;YACV;;eAEG;YACH,CAAC,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;SACpB,CAAC;QACF,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,mBAAmB,CAAC,EAAE;YACrB,IAAI,CAAC,EAAE,MAAM,CAAC;YACd,YAAY,CAAC,EAAE,OAAO,CAAC;YACvB,YAAY,CAAC,EAAE,OAAO,CAAC;SACvB,CAAC;QACF,gBAAgB,CAAC,EAAE,eAAe,CAAC;QACnC,oBAAoB,CAAC,EAAE;YACtB,UAAU,CAAC,EAAE,MAAM,CAAC;YACpB,WAAW,CAAC,EAAE,MAAM,CAAC;SACrB,CAAC;QACF,SAAS,CAAC,EAAE;YACX,WAAW,CAAC,EAAE,MAAM,CAAC;SACrB,CAAC;QACF,OAAO,CAAC,EAAE;YACT,KAAK,CAAC,EAAE;gBACP,IAAI,CAAC,EAAE,MAAM,CAAC;aACd,CAAC;YACF,YAAY,CAAC,EAAE,MAAM,CAAC;SACtB,CAAC;QACF,WAAW,CAAC,EAAE;YACb,qBAAqB,CAAC,EAAE,MAAM,CAAC;YAC/B,iBAAiB,CAAC,EAAE,MAAM,CAAC;YAC3B,gBAAgB,CAAC,EAAE,MAAM,CAAC;SAC1B,CAAC;QACF,IAAI,CAAC,EAAE;YACN,uBAAuB,CAAC,EAAE,MAAM,CAAC;YACjC,SAAS,CAAC,EAAE,MAAM,CAAC;SACnB,CAAC;KACF,CAAC;IACF;;OAEG;IACH,IAAI,EAAE,MAAM,EAAE,CAAC;IACf;;OAEG;IACH,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;IACpC;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,GAAG,SAAS,CAAC;IACxC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAChC;;;;;OAKG;IACH,UAAU,CAAC,EAAE,aAAa,EAAE,GAAG,SAAS,CAAC;IACzC;;;;;;;;;OASG;IACH,QAAQ,CAAC,EAAE;QACV,SAAS,CAAC,EACP,OAAO,GACP;YACA,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;SACpC,CAAC;QACL,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;KAC/B,CAAC;IACF;;;OAGG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,WAAW,CAAC,EAAE;QACb,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;QACnC,KAAK,EAAE,MAAM,CAAC;QACd,OAAO,EAAE,OAAO,CAAC;KACjB,CAAC;IACF,IAAI,CAAC,EAAE;QACN,KAAK,EAAE,MAAM,CAAC;QACd,YAAY,CAAC,EAAE,MAAM,CAAC;QACtB,cAAc,CAAC,EAAE,MAAM,CAAC;KACxB,CAAC;CACF;AAED;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB"}
@@ -7,6 +7,8 @@ export declare const bertopic: (model: ModelData) => string[];
  export declare const bm25s: (model: ModelData) => string[];
  export declare const depth_anything_v2: (model: ModelData) => string[];
  export declare const diffusers: (model: ModelData) => string[];
+ export declare const cartesia_pytorch: (model: ModelData) => string[];
+ export declare const cartesia_mlx: (model: ModelData) => string[];
  export declare const edsnlp: (model: ModelData) => string[];
  export declare const espnetTTS: (model: ModelData) => string[];
  export declare const espnetASR: (model: ModelData) => string[];
@@ -29,6 +31,7 @@ export declare const relik: (model: ModelData) => string[];
  export declare const tensorflowtts: (model: ModelData) => string[];
  export declare const timm: (model: ModelData) => string[];
  export declare const saelens: () => string[];
+ export declare const seed_story: () => string[];
  export declare const sklearn: (model: ModelData) => string[];
  export declare const stable_audio_tools: (model: ModelData) => string[];
  export declare const fastai: (model: ModelData) => string[];
@@ -1 +1 @@
- {"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAY9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAMF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,iBAAiB,UAAW,SAAS,KAAG,MAAM,EA6C1D,CAAC;AA+BF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAS9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EA2B7C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AAEF,eAAO,MAAM,oBAAoB,UAAW,SAAS,KAAG,MAAM,EAI7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EA4CrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAYjD,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC"}
+ {"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAY9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAkBlD,CAAC;AAMF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,iBAAiB,UAAW,SAAS,KAAG,MAAM,EA6C1D,CAAC;AA+BF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EAmBrD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAgB/C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAS9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAgBzD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAOjD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAIlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,aAAa,QAAO,MAAM,EAQtC,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAEF,eAAO,MAAM,OAAO,QAA6B,MAAM,EAQtD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAanC,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,kBAAkB,UAAW,SAAS,KAAG,MAAM,EAmC3D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EA2B7C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AAEF,eAAO,MAAM,oBAAoB,UAAW,SAAS,KAAG,MAAM,EAI7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EA4CrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,OAAO,QAAO,MAAM,EAYhC,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAYjD,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC;AAEF,eAAO,MAAM,UAAU,QAAO,MAAM,EAYnC,CAAC"}
@@ -146,6 +146,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
  filter: false;
  countDownloads: string;
  };
+ deepforest: {
+ prettyLabel: string;
+ repoName: string;
+ docsUrl: string;
+ repoUrl: string;
+ countDownloads: string;
+ };
  "depth-anything-v2": {
  prettyLabel: string;
  repoName: string;
@@ -174,6 +181,18 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
  repoName: string;
  repoUrl: string;
  };
+ cartesia_pytorch: {
+ prettyLabel: string;
+ repoName: string;
+ repoUrl: string;
+ snippets: (model: ModelData) => string[];
+ };
+ cartesia_mlx: {
+ prettyLabel: string;
+ repoName: string;
+ repoUrl: string;
+ snippets: (model: ModelData) => string[];
+ };
  edsnlp: {
  prettyLabel: string;
  repoName: string;
@@ -533,6 +552,14 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
  filter: false;
  countDownloads: string;
  };
+ "seed-story": {
+ prettyLabel: string;
+ repoName: string;
+ repoUrl: string;
+ filter: false;
+ countDownloads: string;
+ snippets: () => string[];
+ };
  "stable-baselines3": {
  prettyLabel: string;
  repoName: string;
@@ -621,6 +648,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
  };
  };
  export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
- export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
- export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
+ export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
+ export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
  //# sourceMappingURL=model-libraries.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAwjBI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,2gCAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,2gCAQ1B,CAAC"}
+ {"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAmlBI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,+kCAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,+kCAQ1B,CAAC"}
@@ -1 +1 @@
- {"version":3,"file":"curl.d.ts","sourceRoot":"","sources":["../../../src/snippets/curl.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEpD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAEnD,eAAO,MAAM,YAAY,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAM3E,CAAC;AAEF,eAAO,MAAM,qBAAqB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAgBpF,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAM5F,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAK1E,CAAC;AAEF,eAAO,MAAM,YAAY,EAAE,OAAO,CAAC,MAAM,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,KAAK,MAAM,CAAC,CAwBhH,CAAC;AAEF,wBAAgB,uBAAuB,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,CAI5F;AAED,wBAAgB,uBAAuB,CAAC,KAAK,EAAE,IAAI,CAAC,gBAAgB,EAAE,cAAc,CAAC,GAAG,OAAO,CAE9F"}
+ {"version":3,"file":"curl.d.ts","sourceRoot":"","sources":["../../../src/snippets/curl.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEpD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAEnD,eAAO,MAAM,YAAY,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAKhB,CAAC;AAE7D,eAAO,MAAM,qBAAqB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAgBpF,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAKjC,CAAC;AAE7D,eAAO,MAAM,WAAW,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAIf,CAAC;AAE7D,eAAO,MAAM,YAAY,EAAE,OAAO,CAAC,MAAM,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,KAAK,MAAM,CAAC,CAwBhH,CAAC;AAEF,wBAAgB,uBAAuB,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,CAI5F;AAED,wBAAgB,uBAAuB,CAAC,KAAK,EAAE,IAAI,CAAC,gBAAgB,EAAE,cAAc,CAAC,GAAG,OAAO,CAE9F"}
@@ -1 +1 @@
- {"version":3,"file":"js.d.ts","sourceRoot":"","sources":["../../../src/snippets/js.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEpD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAEnD,eAAO,MAAM,YAAY,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAmBxE,CAAC;AAEL,eAAO,MAAM,qBAAqB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAkBpF,CAAC;AACF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAqBzF,CAAC;AAEL,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAkB9E,CAAC;AAEL,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAqCjF,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAoBvE,CAAC;AAEL,eAAO,MAAM,UAAU,EAAE,OAAO,CAAC,MAAM,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,KAAK,MAAM,CAAC,CAwB9G,CAAC;AAEF,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,CAI1F;AAED,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,gBAAgB,GAAG,OAAO,CAEtE"}
1
+ {"version":3,"file":"js.d.ts","sourceRoot":"","sources":["../../../src/snippets/js.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEpD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAEnD,eAAO,MAAM,YAAY,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAmBxE,CAAC;AAEL,eAAO,MAAM,qBAAqB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAiBpF,CAAC;AACF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAqBzF,CAAC;AAEL,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAkB9E,CAAC;AAEL,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAqCjF,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAoBvE,CAAC;AAEL,eAAO,MAAM,UAAU,EAAE,OAAO,CAAC,MAAM,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,KAAK,MAAM,CAAC,CAwB9G,CAAC;AAEF,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,CAI1F;AAED,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,gBAAgB,GAAG,OAAO,CAEtE"}
@@ -1 +1 @@
1
- {"version":3,"file":"python.d.ts","sourceRoot":"","sources":["../../../src/snippets/python.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEpD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAEnD,eAAO,MAAM,qBAAqB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAcpF,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,KAAG,MAQrE,CAAC;AAEJ,eAAO,MAAM,kCAAkC,UAAW,gBAAgB,KAAG,MAc1E,CAAC;AAEJ,eAAO,MAAM,YAAY,UAAW,gBAAgB,KAAG,MAOpD,CAAC;AAEJ,eAAO,MAAM,WAAW,UAAW,gBAAgB,KAAG,MAOP,CAAC;AAEhD,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,MAUjB,CAAC;AAE7C,eAAO,MAAM,cAAc,UAAW,gBAAgB,KAAG,MAMtD,CAAC;AAEJ,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,MA2B5D,CAAC;AAEF,eAAO,MAAM,gCAAgC,UAAW,gBAAgB,KAAG,MAUxE,CAAC;AAEJ,eAAO,MAAM,cAAc,EAAE,OAAO,CAAC,MAAM,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,KAAK,MAAM,CAAC,CA4BlH,CAAC;AAEF,wBAAgB,yBAAyB,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,CAiB9F;AAED,wBAAgB,yBAAyB,CAAC,KAAK,EAAE,gBAAgB,GAAG,OAAO,CAE1E"}
1
+ {"version":3,"file":"python.d.ts","sourceRoot":"","sources":["../../../src/snippets/python.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEpD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAEnD,eAAO,MAAM,qBAAqB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAajC,CAAC;AAErD,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,KAAG,MAQrE,CAAC;AAEJ,eAAO,MAAM,kCAAkC,UAAW,gBAAgB,KAAG,MAc1E,CAAC;AAEJ,eAAO,MAAM,YAAY,UAAW,gBAAgB,KAAG,MAOpD,CAAC;AAEJ,eAAO,MAAM,WAAW,UAAW,gBAAgB,KAAG,MAOP,CAAC;AAEhD,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,MAUjB,CAAC;AAE7C,eAAO,MAAM,cAAc,UAAW,gBAAgB,KAAG,MAMtD,CAAC;AAEJ,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,MA2B5D,CAAC;AAEF,eAAO,MAAM,gCAAgC,UAAW,gBAAgB,KAAG,MAUxE,CAAC;AAEJ,eAAO,MAAM,cAAc,EAAE,OAAO,CAAC,MAAM,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,KAAK,MAAM,CAAC,CA4BlH,CAAC;AAEF,wBAAgB,yBAAyB,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,CAiB9F;AAED,wBAAgB,yBAAyB,CAAC,KAAK,EAAE,gBAAgB,GAAG,OAAO,CAE1E"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "@huggingface/tasks",
3
3
  "packageManager": "pnpm@8.10.5",
4
- "version": "0.11.10",
4
+ "version": "0.11.12",
5
5
  "description": "List of ML tasks for huggingface.co/tasks",
6
6
  "repository": "https://github.com/huggingface/huggingface.js.git",
7
7
  "publishConfig": {
@@ -77,6 +77,12 @@ export const DATASET_LIBRARIES_UI_ELEMENTS = {
77
77
  repoUrl: "https://github.com/pola-rs/polars",
78
78
  docsUrl: "https://huggingface.co/docs/hub/datasets-polars",
79
79
  },
80
+ duckdb: {
81
+ prettyLabel: "DuckDB",
82
+ repoName: "duckdb",
83
+ repoUrl: "https://github.com/duckdb/duckdb",
84
+ docsUrl: "https://huggingface.co/docs/hub/datasets-duckdb",
85
+ },
80
86
  } satisfies Record<string, DatasetLibraryUiElement>;
81
87
 
82
88
  /// List of the dataset libraries supported by the Hub
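Not part of the published diff: a minimal TypeScript sketch of how the new duckdb entry could be consumed, assuming DATASET_LIBRARIES_UI_ELEMENTS is re-exported from the package entry point and that each entry carries the prettyLabel/repoName/repoUrl/docsUrl fields visible above.

// Sketch only: list each dataset library with its documentation link.
import { DATASET_LIBRARIES_UI_ELEMENTS } from "@huggingface/tasks";

const links = Object.entries(DATASET_LIBRARIES_UI_ELEMENTS).map(
  ([key, el]) => `${el.prettyLabel} (${key}) -> ${el.docsUrl ?? el.repoUrl}`
);
// The entry added in this version contributes:
// "DuckDB (duckdb) -> https://huggingface.co/docs/hub/datasets-duckdb"
console.log(links.join("\n"));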
package/src/hardware.ts CHANGED
@@ -164,6 +164,14 @@ export const SKUS = {
164
164
  tflops: 12.74,
165
165
  memory: [12, 8],
166
166
  },
167
+ "RTX 2080 Ti": {
168
+ tflops: 26.9,
169
+ memory: [11],
170
+ },
171
+ "RTX 2080": {
172
+ tflops: 20.14,
173
+ memory: [8],
174
+ },
167
175
  "RTX 2070": {
168
176
  tflops: 14.93,
169
177
  memory: [8],
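Not part of the published diff: a hedged sketch using the two SKU records added above. The surrounding SKUS nesting (GPU/CPU grouping, exact key path) is not visible in this hunk, so the values are copied into a local constant instead of being imported.

// Sketch only: the two records added above, copied locally because the
// full SKUS structure is not shown in this hunk. Memory values are in GB.
const addedSkus = {
  "RTX 2080 Ti": { tflops: 26.9, memory: [11] },
  "RTX 2080": { tflops: 20.14, memory: [8] },
} as const;

// Largest on-card memory for a given SKU.
const largestMemory = (sku: { memory: readonly number[] }): number => Math.max(...sku.memory);

console.log(largestMemory(addedSkus["RTX 2080 Ti"])); // 11
console.log(addedSkus["RTX 2080"].tflops);            // 20.14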
package/src/local-apps.ts CHANGED
@@ -58,10 +58,15 @@ export type LocalApp = {
58
58
  }
59
59
  );
60
60
 
61
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars
61
62
  function isGgufModel(model: ModelData) {
62
63
  return model.tags.includes("gguf");
63
64
  }
64
65
 
66
+ function isLlamaCppGgufModel(model: ModelData) {
67
+ return !!model.gguf?.context_length;
68
+ }
69
+
65
70
  const snippetLlamacpp = (model: ModelData, filepath?: string): LocalAppSnippet[] => {
66
71
  const command = (binary: string) =>
67
72
  [
@@ -138,14 +143,14 @@ export const LOCAL_APPS = {
138
143
  prettyLabel: "llama.cpp",
139
144
  docsUrl: "https://github.com/ggerganov/llama.cpp",
140
145
  mainTask: "text-generation",
141
- displayOnModelPage: isGgufModel,
146
+ displayOnModelPage: isLlamaCppGgufModel,
142
147
  snippet: snippetLlamacpp,
143
148
  },
144
149
  lmstudio: {
145
150
  prettyLabel: "LM Studio",
146
151
  docsUrl: "https://lmstudio.ai",
147
152
  mainTask: "text-generation",
148
- displayOnModelPage: isGgufModel,
153
+ displayOnModelPage: isLlamaCppGgufModel,
149
154
  deeplink: (model, filepath) =>
150
155
  new URL(`lmstudio://open_from_hf?model=${model.id}${filepath ? `&file=${filepath}` : ""}`),
151
156
  },
@@ -153,28 +158,28 @@ export const LOCAL_APPS = {
153
158
  prettyLabel: "LocalAI",
154
159
  docsUrl: "https://github.com/mudler/LocalAI",
155
160
  mainTask: "text-generation",
156
- displayOnModelPage: isGgufModel,
161
+ displayOnModelPage: isLlamaCppGgufModel,
157
162
  snippet: snippetLocalAI,
158
163
  },
159
164
  jan: {
160
165
  prettyLabel: "Jan",
161
166
  docsUrl: "https://jan.ai",
162
167
  mainTask: "text-generation",
163
- displayOnModelPage: isGgufModel,
168
+ displayOnModelPage: isLlamaCppGgufModel,
164
169
  deeplink: (model) => new URL(`jan://models/huggingface/${model.id}`),
165
170
  },
166
171
  backyard: {
167
172
  prettyLabel: "Backyard AI",
168
173
  docsUrl: "https://backyard.ai",
169
174
  mainTask: "text-generation",
170
- displayOnModelPage: isGgufModel,
175
+ displayOnModelPage: isLlamaCppGgufModel,
171
176
  deeplink: (model) => new URL(`https://backyard.ai/hf/model/${model.id}`),
172
177
  },
173
178
  sanctum: {
174
179
  prettyLabel: "Sanctum",
175
180
  docsUrl: "https://sanctum.ai",
176
181
  mainTask: "text-generation",
177
- displayOnModelPage: isGgufModel,
182
+ displayOnModelPage: isLlamaCppGgufModel,
178
183
  deeplink: (model) => new URL(`sanctum://open_from_hf?model=${model.id}`),
179
184
  },
180
185
  jellybox: {
@@ -182,12 +187,12 @@ export const LOCAL_APPS = {
182
187
  docsUrl: "https://jellybox.com",
183
188
  mainTask: "text-generation",
184
189
  displayOnModelPage: (model) =>
185
- isGgufModel(model) ||
190
+ isLlamaCppGgufModel(model) ||
186
191
  (model.library_name === "diffusers" &&
187
192
  model.tags.includes("safetensors") &&
188
193
  (model.pipeline_tag === "text-to-image" || model.tags.includes("lora"))),
189
194
  deeplink: (model) => {
190
- if (isGgufModel(model)) {
195
+ if (isLlamaCppGgufModel(model)) {
191
196
  return new URL(`jellybox://llm/models/huggingface/LLM/${model.id}`);
192
197
  } else if (model.tags.includes("lora")) {
193
198
  return new URL(`jellybox://image/models/huggingface/ImageLora/${model.id}`);
@@ -200,7 +205,7 @@ export const LOCAL_APPS = {
200
205
  prettyLabel: "Msty",
201
206
  docsUrl: "https://msty.app",
202
207
  mainTask: "text-generation",
203
- displayOnModelPage: isGgufModel,
208
+ displayOnModelPage: isLlamaCppGgufModel,
204
209
  deeplink: (model) => new URL(`msty://models/search/hf/${model.id}`),
205
210
  },
206
211
  recursechat: {
@@ -208,7 +213,7 @@ export const LOCAL_APPS = {
208
213
  docsUrl: "https://recurse.chat",
209
214
  mainTask: "text-generation",
210
215
  macOSOnly: true,
211
- displayOnModelPage: isGgufModel,
216
+ displayOnModelPage: isLlamaCppGgufModel,
212
217
  deeplink: (model) => new URL(`recursechat://new-hf-gguf-model?hf-model-id=${model.id}`),
213
218
  },
214
219
  drawthings: {
@@ -232,14 +237,15 @@ export const LOCAL_APPS = {
232
237
  mainTask: "text-to-image",
233
238
  macOSOnly: true,
234
239
  displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
235
- deeplink: (model) => new URL(`diffusionbee://open_from_hf?model=${model.id}`),
240
+ deeplink: (model) => new URL(`https://diffusionbee.com/huggingface_import?model_id=${model.id}`),
236
241
  },
237
242
  joyfusion: {
238
243
  prettyLabel: "JoyFusion",
239
244
  docsUrl: "https://joyfusion.app",
240
245
  mainTask: "text-to-image",
241
246
  macOSOnly: true,
242
- displayOnModelPage: (model) => model.tags.includes("coreml") && model.tags.includes("joyfusion") && model.pipeline_tag === "text-to-image",
247
+ displayOnModelPage: (model) =>
248
+ model.tags.includes("coreml") && model.tags.includes("joyfusion") && model.pipeline_tag === "text-to-image",
243
249
  deeplink: (model) => new URL(`https://joyfusion.app/import_from_hf?repo_id=${model.id}`),
244
250
  },
245
251
  invoke: {
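Not part of the published diff: a hedged illustration of why the display check for llama.cpp-based local apps was tightened, using two hypothetical model objects and a trimmed-down local type.

// Sketch only: a repo can carry the "gguf" tag without parseable GGUF metadata.
// The old isGgufModel check matched both objects below; the new
// isLlamaCppGgufModel check only matches the one exposing gguf.context_length.
type MiniModel = { tags: string[]; gguf?: { context_length?: number } };

const taggedOnly: MiniModel = { tags: ["gguf"] };
const withMetadata: MiniModel = { tags: ["gguf"], gguf: { context_length: 4096 } };

const isGgufModel = (m: MiniModel) => m.tags.includes("gguf");
const isLlamaCppGgufModel = (m: MiniModel) => !!m.gguf?.context_length;

console.log(isGgufModel(taggedOnly), isLlamaCppGgufModel(taggedOnly));     // true false
console.log(isGgufModel(withMetadata), isLlamaCppGgufModel(withMetadata)); // true true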
package/src/model-data.ts CHANGED
@@ -109,6 +109,16 @@ export interface ModelData {
109
109
  * Example: transformers, SpeechBrain, Stanza, etc.
110
110
  */
111
111
  library_name?: string;
112
+ safetensors?: {
113
+ parameters: Record<string, number>;
114
+ total: number;
115
+ sharded: boolean;
116
+ };
117
+ gguf?: {
118
+ total: number;
119
+ architecture?: string;
120
+ context_length?: number;
121
+ };
112
122
  }
113
123
 
114
124
  /**
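Not part of the published diff: a hedged sketch of the two new optional ModelData fields with invented example values. The hunk does not state what gguf.total measures, and the keys of safetensors.parameters are assumed to be dtypes.

// Sketch only: invented example values for the new optional fields.
const meta = {
  safetensors: {
    parameters: { F32: 250_000, F16: 7_000_000_000 }, // assumed per-dtype parameter counts
    total: 7_000_250_000,
    sharded: true,
  },
  gguf: {
    total: 7_000_000_000, // meaning (parameters vs. bytes) not stated in this hunk
    architecture: "llama",
    context_length: 4096,
  },
};

// If `parameters` really is per-dtype counts, the values should sum to `total`.
const summed = Object.values(meta.safetensors.parameters).reduce((a, b) => a + b, 0);
console.log(summed === meta.safetensors.total); // true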
@@ -170,6 +170,45 @@ export const diffusers = (model: ModelData): string[] => {
170
170
  }
171
171
  };
172
172
 
173
+ export const cartesia_pytorch = (model: ModelData): string[] => [
174
+ `# pip install --no-binary :all: cartesia-pytorch
175
+ from cartesia_pytorch import ReneLMHeadModel
176
+ from transformers import AutoTokenizer
177
+
178
+ model = ReneLMHeadModel.from_pretrained("${model.id}")
179
+ tokenizer = AutoTokenizer.from_pretrained("allenai/OLMo-1B-hf")
180
+
181
+ in_message = ["Rene Descartes was"]
182
+ inputs = tokenizer(in_message, return_tensors="pt")
183
+
184
+ outputs = model.generate(inputs.input_ids, max_length=50, top_k=100, top_p=0.99)
185
+ out_message = tokenizer.batch_decode(outputs, skip_special_tokens=True)[0]
186
+
187
+ print(out_message)
188
+ `,
+ `,
189
+ ];
190
+
191
+ export const cartesia_mlx = (model: ModelData): string[] => [
192
+ `import mlx.core as mx
193
+ import cartesia_mlx as cmx
194
+
195
+ model = cmx.from_pretrained("${model.id}")
196
+ model.set_dtype(mx.float32)
197
+
198
+ prompt = "Rene Descartes was"
199
+
200
+ for text in model.generate(
201
+ prompt,
202
+ max_tokens=500,
203
+ eval_every_n=5,
204
+ verbose=True,
205
+ top_p=0.99,
206
+ temperature=0.85,
207
+ ):
208
+ print(text, end="", flush=True)
209
+ `,
210
+ ];
211
+
173
212
  export const edsnlp = (model: ModelData): string[] => {
174
213
  const packageName = nameWithoutNamespace(model.id).replaceAll("-", "_");
175
214
  return [
@@ -270,12 +309,12 @@ llm = Llama.from_pretrained(
270
309
  )
271
310
 
272
311
  llm.create_chat_completion(
273
- messages = [
274
- {
275
- "role": "user",
276
- "content": "What is the capital of France?"
277
- }
278
- ]
312
+ messages = [
313
+ {
314
+ "role": "user",
315
+ "content": "What is the capital of France?"
316
+ }
317
+ ]
279
318
  )`,
280
319
  ];
281
320
 
@@ -435,6 +474,21 @@ sae, cfg_dict, sparsity = SAE.from_pretrained(
435
474
  )`,
436
475
  ];
437
476
 
477
+ export const seed_story = (): string[] => [
478
+ `# seed_story_cfg_path refers to 'https://github.com/TencentARC/SEED-Story/blob/master/configs/clm_models/agent_7b_sft.yaml'
479
+ # llm_cfg_path refers to 'https://github.com/TencentARC/SEED-Story/blob/master/configs/clm_models/llama2chat7b_lora.yaml'
480
+ from omegaconf import OmegaConf
481
+ import hydra
482
+
483
+ # load Llama2
484
+ llm_cfg = OmegaConf.load(llm_cfg_path)
485
+ llm = hydra.utils.instantiate(llm_cfg, torch_dtype="fp16")
486
+
487
+ # initialize seed_story
488
+ seed_story_cfg = OmegaConf.load(seed_story_cfg_path)
489
+ seed_story = hydra.utils.instantiate(seed_story_cfg, llm=llm) `,
490
+ ];
491
+
438
492
  const skopsPickle = (model: ModelData, modelFile: string) => {
439
493
  return [
440
494
  `import joblib
@@ -150,6 +150,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
150
150
  filter: false,
151
151
  countDownloads: `path:"adapter_config.json"`,
152
152
  },
153
+ deepforest: {
154
+ prettyLabel: "DeepForest",
155
+ repoName: "deepforest",
156
+ docsUrl: "https://deepforest.readthedocs.io/en/latest/",
157
+ repoUrl: "https://github.com/weecology/DeepForest",
158
+ countDownloads: `path_extension:"pt" OR path_extension:"pl"`,
159
+ },
153
160
  "depth-anything-v2": {
154
161
  prettyLabel: "DepthAnythingV2",
155
162
  repoName: "Depth Anything V2",
@@ -179,6 +186,18 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
179
186
  repoName: "doctr",
180
187
  repoUrl: "https://github.com/mindee/doctr",
181
188
  },
189
+ cartesia_pytorch: {
190
+ prettyLabel: "Cartesia Pytorch",
191
+ repoName: "Cartesia Pytorch",
192
+ repoUrl: "https://github.com/cartesia-ai/cartesia_pytorch",
193
+ snippets: snippets.cartesia_pytorch,
194
+ },
195
+ cartesia_mlx: {
196
+ prettyLabel: "Cartesia MLX",
197
+ repoName: "Cartesia MLX",
198
+ repoUrl: "https://github.com/cartesia-ai/cartesia_mlx",
199
+ snippets: snippets.cartesia_mlx,
200
+ },
182
201
  edsnlp: {
183
202
  prettyLabel: "EDS-NLP",
184
203
  repoName: "edsnlp",
@@ -539,6 +558,14 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
539
558
  filter: false,
540
559
  countDownloads: `path_extension:"safetensors"`,
541
560
  },
561
+ "seed-story": {
562
+ prettyLabel: "SEED-Story",
563
+ repoName: "SEED-Story",
564
+ repoUrl: "https://github.com/TencentARC/SEED-Story",
565
+ filter: false,
566
+ countDownloads: `path:"cvlm_llama2_tokenizer/tokenizer.model"`,
567
+ snippets: snippets.seed_story,
568
+ },
542
569
  "stable-baselines3": {
543
570
  prettyLabel: "stable-baselines3",
544
571
  repoName: "stable-baselines3",
@@ -7,8 +7,7 @@ export const snippetBasic = (model: ModelDataMinimal, accessToken: string): stri
7
7
  -X POST \\
8
8
  -d '{"inputs": ${getModelInputSnippet(model, true)}}' \\
9
9
  -H 'Content-Type: application/json' \\
10
- -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"
11
- `;
10
+ -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"`;
12
11
 
13
12
  export const snippetTextGeneration = (model: ModelDataMinimal, accessToken: string): string => {
14
13
  if (model.config?.tokenizer_config?.chat_template) {
@@ -33,15 +32,13 @@ export const snippetZeroShotClassification = (model: ModelDataMinimal, accessTok
33
32
  -X POST \\
34
33
  -d '{"inputs": ${getModelInputSnippet(model, true)}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}}' \\
35
34
  -H 'Content-Type: application/json' \\
36
- -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"
37
- `;
35
+ -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"`;
38
36
 
39
37
  export const snippetFile = (model: ModelDataMinimal, accessToken: string): string =>
40
38
  `curl https://api-inference.huggingface.co/models/${model.id} \\
41
39
  -X POST \\
42
40
  --data-binary '@${getModelInputSnippet(model, true, true)}' \\
43
- -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"
44
- `;
41
+ -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"`;
45
42
 
46
43
  export const curlSnippets: Partial<Record<PipelineType, (model: ModelDataMinimal, accessToken: string) => string>> = {
47
44
  // Same order as in js/src/lib/interfaces/Types.ts
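Not part of the published diff: a small before/after illustration of the whitespace cleanup in the generated curl snippets; the command text is abridged and the model name is hypothetical.

// Sketch only: generated strings used to end with a newline plus two spaces;
// after this change they end right after the Authorization header line.
const before = `curl https://api-inference.huggingface.co/models/some-model \\
  -H "Authorization: Bearer {API_TOKEN}"
  `;
const after = `curl https://api-inference.huggingface.co/models/some-model \\
  -H "Authorization: Bearer {API_TOKEN}"`;

console.log(before.endsWith("\n  ")); // true
console.log(after.endsWith('"'));     // true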
@@ -36,8 +36,7 @@ for await (const chunk of inference.chatCompletionStream({
36
36
  max_tokens: 500,
37
37
  })) {
38
38
  process.stdout.write(chunk.choices[0]?.delta?.content || "");
39
- }
40
- `;
39
+ }`;
41
40
  } else {
42
41
  return snippetBasic(model, accessToken);
43
42
  }
@@ -15,8 +15,7 @@ for message in client.chat_completion(
15
15
  max_tokens=500,
16
16
  stream=True,
17
17
  ):
18
- print(message.choices[0].delta.content, end="")
19
- `;
18
+ print(message.choices[0].delta.content, end="")`;
20
19
 
21
20
  export const snippetZeroShotClassification = (model: ModelDataMinimal): string =>
22
21
  `def query(payload):