@huggingface/tasks 0.10.8 → 0.10.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +133 -29
- package/dist/index.js +133 -29
- package/dist/src/hardware.d.ts +90 -2
- package/dist/src/hardware.d.ts.map +1 -1
- package/dist/src/local-apps.d.ts +7 -0
- package/dist/src/local-apps.d.ts.map +1 -1
- package/dist/src/model-libraries-snippets.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/hardware.ts +92 -4
- package/src/local-apps.ts +15 -5
- package/src/model-libraries-snippets.ts +11 -7
- package/src/tasks/automatic-speech-recognition/about.md +16 -16
- package/src/tasks/automatic-speech-recognition/data.ts +7 -7
- package/src/tasks/text-to-speech/about.md +1 -3
- package/src/tasks/text-to-speech/data.ts +6 -6
package/dist/index.cjs
CHANGED
@@ -1541,16 +1541,16 @@ var data_default2 = taskData2;
 var taskData3 = {
   datasets: [
     {
-      description: "
-      id: "mozilla-foundation/
+      description: "31,175 hours of multilingual audio-text dataset in 108 languages.",
+      id: "mozilla-foundation/common_voice_17_0"
     },
     {
       description: "An English dataset with 1,000 hours of data.",
       id: "librispeech_asr"
     },
     {
-      description: "
-      id: "
+      description: "A multi-lingual audio dataset with 370K hours of audio.",
+      id: "espnet/yodas"
     }
   ],
   demo: {
@@ -1585,12 +1585,12 @@ var taskData3 = {
       id: "openai/whisper-large-v3"
     },
     {
-      description: "A good generic
-      id: "facebook/
+      description: "A good generic speech model by MetaAI for fine-tuning.",
+      id: "facebook/w2v-bert-2.0"
     },
     {
       description: "An end-to-end model that performs ASR and Speech Translation by MetaAI.",
-      id: "facebook/
+      id: "facebook/seamless-m4t-v2-large"
     }
   ],
   spaces: [
@@ -2992,8 +2992,8 @@ var taskData24 = {
   canonicalId: "text-to-audio",
   datasets: [
     {
-      description: "
-      id: "
+      description: "10K hours of multi-speaker English dataset.",
+      id: "parler-tts/mls_eng_10k"
     },
     {
       description: "Multi-speaker English dataset.",
@@ -3031,8 +3031,8 @@ var taskData24 = {
       id: "facebook/mms-tts"
     },
     {
-      description: "
-      id: "
+      description: "A prompt based, powerful TTS model.",
+      id: "parler-tts/parler_tts_mini_v0.1"
     }
   ],
   spaces: [
@@ -3045,8 +3045,8 @@ var taskData24 = {
       id: "coqui/xtts"
     },
     {
-      description: "An application that synthesizes speech for
-      id: "
+      description: "An application that synthesizes speech for diverse speaker prompts.",
+      id: "parler-tts/parler_tts_mini"
     }
   ],
   summary: "Text-to-Speech (TTS) is the task of generating natural sounding speech given text input. TTS models can be extended to have a single model that generates speech for multiple speakers and multiple languages.",
@@ -4426,13 +4426,15 @@ var transformers = (model) => {
   ].join("\n");
 }
 if (model.pipeline_tag && LIBRARY_TASK_MAPPING.transformers?.includes(model.pipeline_tag)) {
-  const pipelineSnippet = [
-
-  "
-
-
-
-
+  const pipelineSnippet = ["# Use a pipeline as a high-level helper", "from transformers import pipeline", ""];
+  if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
+    pipelineSnippet.push("messages = [", ' {"role": "user", "content": "Who are you?"},', "]");
+  }
+  pipelineSnippet.push(`pipe = pipeline("${model.pipeline_tag}", model="${model.id}"` + remote_code_snippet + ")");
+  if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
+    pipelineSnippet.push("pipe(messages)");
+  }
+  return [pipelineSnippet.join("\n"), autoSnippet];
 }
 return [autoSnippet];
};
@@ -5476,6 +5478,30 @@ var SKUS = {
     tflops: 91.1,
     memory: [48]
   },
+  "RTX 5880 Ada": {
+    tflops: 69.3,
+    memory: [48]
+  },
+  "RTX 5000 Ada": {
+    tflops: 65.3,
+    memory: [32]
+  },
+  "RTX 4500 Ada": {
+    tflops: 39.6,
+    memory: [24]
+  },
+  "RTX 4000 Ada": {
+    tflops: 26.7,
+    memory: [20]
+  },
+  "RTX 4000 SFF Ada": {
+    tflops: 19.2,
+    memory: [20]
+  },
+  "RTX 2000 Ada": {
+    tflops: 12,
+    memory: [16]
+  },
   A100: {
     tflops: 77.97,
     memory: [80, 40]
@@ -5488,14 +5514,14 @@ var SKUS = {
     tflops: 31.24,
     memory: [24]
   },
-  T4: {
-    tflops: 65.13,
-    memory: [16]
-  },
   "RTX 4090": {
     tflops: 82.58,
     memory: [24]
   },
+  "RTX 4090D": {
+    tflops: 79.49,
+    memory: [24]
+  },
   "RTX 4080 SUPER": {
     tflops: 52.2,
     memory: [16]
@@ -5520,6 +5546,14 @@ var SKUS = {
     tflops: 44.1,
     memory: [16]
   },
+  "RTX 4060": {
+    tflops: 15.11,
+    memory: [8]
+  },
+  "RTX 4060 Ti": {
+    tflops: 22.06,
+    memory: [8, 16]
+  },
   "RTX 3090": {
     tflops: 35.58,
     memory: [24]
@@ -5556,13 +5590,57 @@ var SKUS = {
     tflops: 12.74,
     memory: [12, 8]
   },
+  "RTX 2070": {
+    tflops: 14.93,
+    memory: [8]
+  },
+  "RTX 3050 Mobile": {
+    tflops: 7.639,
+    memory: [6]
+  },
   "RTX 2060 Mobile": {
     tflops: 9.22,
     memory: [6]
   },
+  "GTX 1080 Ti": {
+    tflops: 11.34,
+    // float32 (GPU does not support native float16)
+    memory: [11]
+  },
+  "GTX 1070 Ti": {
+    tflops: 8.2,
+    // float32 (GPU does not support native float16)
+    memory: [8]
+  },
   "RTX Titan": {
     tflops: 32.62,
     memory: [24]
+  },
+  "GTX 1660": {
+    tflops: 10.05,
+    memory: [6]
+  },
+  "GTX 1650 Mobile": {
+    tflops: 6.39,
+    memory: [4]
+  },
+  T4: {
+    tflops: 65.13,
+    memory: [16]
+  },
+  V100: {
+    tflops: 28.26,
+    memory: [32, 16]
+  },
+  "Quadro P6000": {
+    tflops: 12.63,
+    // float32 (GPU does not support native float16)
+    memory: [24]
+  },
+  P40: {
+    tflops: 11.76,
+    // float32 (GPU does not support native float16)
+    memory: [24]
   }
 },
 AMD: {
@@ -5578,6 +5656,10 @@ var SKUS = {
     tflops: 181,
     memory: [64]
   },
+  MI100: {
+    tflops: 184.6,
+    memory: [32]
+  },
   "RX 7900 XTX": {
     tflops: 122.8,
     memory: [24]
@@ -5601,6 +5683,18 @@ var SKUS = {
   "RX 7600 XT": {
     tflops: 45.14,
     memory: [16, 8]
+  },
+  "RX 6950 XT": {
+    tflops: 47.31,
+    memory: [16]
+  },
+  "RX 6800": {
+    tflops: 32.33,
+    memory: [16]
+  },
+  "Radeon Pro VII": {
+    tflops: 26.11,
+    memory: [16]
   }
 }
 },
@@ -5768,16 +5862,19 @@ function isGgufModel(model) {
 }
 var snippetLlamacpp = (model) => {
   return [
-
-
-
+    `## Install llama.cpp via brew
+brew install llama.cpp
+
+## or from source with curl support
+## see llama.cpp README for compilation flags to optimize for your hardware
+git clone https://github.com/ggerganov/llama.cpp
 cd llama.cpp
 LLAMA_CURL=1 make
 `,
     `## Load and run the model
-
+llama \\
     --hf-repo "${model.id}" \\
-    -
+    --hf-file file.gguf \\
     -p "I believe the meaning of life is" \\
     -n 128`
   ];
@@ -5811,6 +5908,13 @@ var LOCAL_APPS = {
   displayOnModelPage: isGgufModel,
   deeplink: (model) => new URL(`https://backyard.ai/hf/model/${model.id}`)
 },
+sanctum: {
+  prettyLabel: "Sanctum",
+  docsUrl: "https://sanctum.ai",
+  mainTask: "text-generation",
+  displayOnModelPage: isGgufModel,
+  deeplink: (model) => new URL(`sanctum://open_from_hf?model=${model.id}`)
+},
 drawthings: {
   prettyLabel: "Draw Things",
   docsUrl: "https://drawthings.ai",
package/dist/index.js
CHANGED
Identical changes to those in package/dist/index.cjs above (this is the ESM build of the same bundle); the hunks fall at the following offsets:

@@ -1503,16 +1503,16 @@ var data_default2 = taskData2;
@@ -1547,12 +1547,12 @@ var taskData3 = {
@@ -2954,8 +2954,8 @@ var taskData24 = {
@@ -2993,8 +2993,8 @@ var taskData24 = {
@@ -3007,8 +3007,8 @@ var taskData24 = {
@@ -4388,13 +4388,15 @@ var transformers = (model) => {
@@ -5438,6 +5440,30 @@ var SKUS = {
@@ -5450,14 +5476,14 @@ var SKUS = {
@@ -5482,6 +5508,14 @@ var SKUS = {
@@ -5518,13 +5552,57 @@ var SKUS = {
@@ -5540,6 +5618,10 @@ var SKUS = {
@@ -5563,6 +5645,18 @@ var SKUS = {
@@ -5730,16 +5824,19 @@ function isGgufModel(model) {
@@ -5773,6 +5870,13 @@ var LOCAL_APPS = {
package/dist/src/hardware.d.ts
CHANGED
The generated declaration file mirrors the SKUS changes made in package/src/hardware.ts (shown in full below); every added key is declared as { tflops: number; memory: number[]; }.

@@ -44,6 +44,30 @@ export declare const SKUS: { : adds "RTX 5880 Ada", "RTX 5000 Ada", "RTX 4500 Ada", "RTX 4000 Ada", "RTX 4000 SFF Ada" and "RTX 2000 Ada".
@@ -56,11 +80,11 @@ export declare const SKUS: { : removes the entry that used to precede "RTX 4090" (T4, which moves further down) and adds "RTX 4090D" after "RTX 4090".
@@ -88,6 +112,14 @@ export declare const SKUS: { : adds "RTX 4060" and "RTX 4060 Ti".
@@ -124,14 +156,54 @@ export declare const SKUS: { : adds "RTX 2070", "RTX 3050 Mobile", "GTX 1080 Ti", "GTX 1070 Ti", "GTX 1660", "GTX 1650 Mobile", T4, V100, "Quadro P6000" and P40.
@@ -146,6 +218,10 @@ export declare const SKUS: { : adds MI100 under AMD.
@@ -170,6 +246,18 @@ export declare const SKUS: { : adds "RX 6950 XT", "RX 6800" and "Radeon Pro VII" under AMD.

package/dist/src/hardware.d.ts.map
CHANGED

@@ -1 +1 @@ : regenerated source map for hardware.d.ts (the single-line "mappings" string is replaced).
package/dist/src/local-apps.d.ts
CHANGED
@@ -77,6 +77,13 @@ export declare const LOCAL_APPS: {
     displayOnModelPage: typeof isGgufModel;
     deeplink: (model: ModelData) => URL;
   };
+  sanctum: {
+    prettyLabel: string;
+    docsUrl: string;
+    mainTask: "text-generation";
+    displayOnModelPage: typeof isGgufModel;
+    deeplink: (model: ModelData) => URL;
+  };
   drawthings: {
     prettyLabel: string;
     docsUrl: string;

package/dist/src/local-apps.d.ts.map
CHANGED

@@ -1 +1 @@ : regenerated source map for local-apps.d.ts (the single-line "mappings" string is replaced).

package/dist/src/model-libraries-snippets.d.ts.map
CHANGED

@@ -1 +1 @@ : regenerated source map for model-libraries-snippets.d.ts (the single-line "mappings" string is replaced).
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@huggingface/tasks",
   "packageManager": "pnpm@8.10.5",
-  "version": "0.10.8",
+  "version": "0.10.10",
   "description": "List of ML tasks for huggingface.co/tasks",
   "repository": "https://github.com/huggingface/huggingface.js.git",
   "publishConfig": {
package/src/hardware.ts
CHANGED
@@ -48,6 +48,30 @@ export const SKUS = {
     tflops: 91.1,
     memory: [48],
   },
+  "RTX 5880 Ada": {
+    tflops: 69.3,
+    memory: [48],
+  },
+  "RTX 5000 Ada": {
+    tflops: 65.3,
+    memory: [32],
+  },
+  "RTX 4500 Ada": {
+    tflops: 39.6,
+    memory: [24],
+  },
+  "RTX 4000 Ada": {
+    tflops: 26.7,
+    memory: [20],
+  },
+  "RTX 4000 SFF Ada": {
+    tflops: 19.2,
+    memory: [20],
+  },
+  "RTX 2000 Ada": {
+    tflops: 12.0,
+    memory: [16],
+  },
   A100: {
     tflops: 77.97,
     memory: [80, 40],
@@ -60,14 +84,14 @@ export const SKUS = {
     tflops: 31.24,
     memory: [24],
   },
-  T4: {
-    tflops: 65.13,
-    memory: [16],
-  },
   "RTX 4090": {
     tflops: 82.58,
     memory: [24],
   },
+  "RTX 4090D": {
+    tflops: 79.49,
+    memory: [24],
+  },
   "RTX 4080 SUPER": {
     tflops: 52.2,
     memory: [16],
@@ -92,6 +116,14 @@ export const SKUS = {
     tflops: 44.1,
     memory: [16],
   },
+  "RTX 4060": {
+    tflops: 15.11,
+    memory: [8],
+  },
+  "RTX 4060 Ti": {
+    tflops: 22.06,
+    memory: [8, 16],
+  },
   "RTX 3090": {
     tflops: 35.58,
     memory: [24],
@@ -128,14 +160,54 @@ export const SKUS = {
     tflops: 12.74,
     memory: [12, 8],
   },
+  "RTX 2070": {
+    tflops: 14.93,
+    memory: [8],
+  },
+  "RTX 3050 Mobile": {
+    tflops: 7.639,
+    memory: [6],
+  },
   "RTX 2060 Mobile": {
     tflops: 9.22,
     memory: [6],
   },
+  "GTX 1080 Ti": {
+    tflops: 11.34, // float32 (GPU does not support native float16)
+    memory: [11],
+  },
+  "GTX 1070 Ti": {
+    tflops: 8.2, // float32 (GPU does not support native float16)
+    memory: [8],
+  },
   "RTX Titan": {
     tflops: 32.62,
     memory: [24],
   },
+  "GTX 1660": {
+    tflops: 10.05,
+    memory: [6],
+  },
+  "GTX 1650 Mobile": {
+    tflops: 6.39,
+    memory: [4],
+  },
+  T4: {
+    tflops: 65.13,
+    memory: [16],
+  },
+  V100: {
+    tflops: 28.26,
+    memory: [32, 16],
+  },
+  "Quadro P6000": {
+    tflops: 12.63, // float32 (GPU does not support native float16)
+    memory: [24],
+  },
+  P40: {
+    tflops: 11.76, // float32 (GPU does not support native float16)
+    memory: [24],
+  },
 },
 AMD: {
   MI300: {
@@ -150,6 +222,10 @@ export const SKUS = {
     tflops: 181.0,
     memory: [64],
   },
+  MI100: {
+    tflops: 184.6,
+    memory: [32],
+  },
   "RX 7900 XTX": {
     tflops: 122.8,
     memory: [24],
@@ -174,6 +250,18 @@ export const SKUS = {
     tflops: 45.14,
     memory: [16, 8],
   },
+  "RX 6950 XT": {
+    tflops: 47.31,
+    memory: [16],
+  },
+  "RX 6800": {
+    tflops: 32.33,
+    memory: [16],
+  },
+  "Radeon Pro VII": {
+    tflops: 26.11,
+    memory: [16],
+  },
  },
 },
 CPU: {
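Each SKUS entry pairs a GPU name with a peak TFLOPS figure (float16 where the card supports it, float32 otherwise, as the inline comments note) and the memory configurations it ships with, in GB. A minimal sketch of how such a table can be consumed, using two of the newly added SKUs copied from the diff; the fitsInMemory helper is illustrative only and not part of the package:

```ts
// Illustrative sketch (not part of @huggingface/tasks): two of the newly added
// SKU entries, and a check of whether a model of a given size in GB fits the
// largest available memory configuration of that card.
const newSkus = {
  "RTX 4060 Ti": { tflops: 22.06, memory: [8, 16] },
  "RTX 2000 Ada": { tflops: 12.0, memory: [16] },
};

function fitsInMemory(sku: keyof typeof newSkus, modelSizeGb: number): boolean {
  return Math.max(...newSkus[sku].memory) >= modelSizeGb;
}

console.log(fitsInMemory("RTX 4060 Ti", 13)); // true: the 16 GB variant can hold ~13 GB of weights
console.log(fitsInMemory("RTX 2000 Ada", 20)); // false: only a 16 GB configuration exists
```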
package/src/local-apps.ts
CHANGED
@@ -48,16 +48,19 @@ function isGgufModel(model: ModelData) {
 
 const snippetLlamacpp = (model: ModelData): string[] => {
   return [
-
-
-
+    `## Install llama.cpp via brew
+brew install llama.cpp
+
+## or from source with curl support
+## see llama.cpp README for compilation flags to optimize for your hardware
+git clone https://github.com/ggerganov/llama.cpp
 cd llama.cpp
 LLAMA_CURL=1 make
 `,
     `## Load and run the model
-
+llama \\
     --hf-repo "${model.id}" \\
-    -
+    --hf-file file.gguf \\
     -p "I believe the meaning of life is" \\
     -n 128`,
   ];
@@ -103,6 +106,13 @@ export const LOCAL_APPS = {
     displayOnModelPage: isGgufModel,
     deeplink: (model) => new URL(`https://backyard.ai/hf/model/${model.id}`),
   },
+  sanctum: {
+    prettyLabel: "Sanctum",
+    docsUrl: "https://sanctum.ai",
+    mainTask: "text-generation",
+    displayOnModelPage: isGgufModel,
+    deeplink: (model) => new URL(`sanctum://open_from_hf?model=${model.id}`),
+  },
   drawthings: {
     prettyLabel: "Draw Things",
     docsUrl: "https://drawthings.ai",
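The new sanctum entry builds a custom-scheme deeplink from the model id, much like the existing backyard.ai entry builds an https URL. A small sketch of what that deeplink looks like for a hypothetical GGUF model id (the id is an example, not taken from the diff):

```ts
// Hypothetical model id, used only for illustration.
const model = { id: "TheBloke/Mistral-7B-Instruct-v0.2-GGUF" };

// Mirrors the deeplink template added for Sanctum in the diff above.
const deeplink = new URL(`sanctum://open_from_hf?model=${model.id}`);
console.log(deeplink.href); // roughly "sanctum://open_from_hf?model=TheBloke/Mistral-7B-Instruct-v0.2-GGUF"
```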
package/src/model-libraries-snippets.ts
CHANGED

@@ -440,13 +440,17 @@ export const transformers = (model: ModelData): string[] => {
 }
 
 if (model.pipeline_tag && LIBRARY_TASK_MAPPING.transformers?.includes(model.pipeline_tag)) {
-  const pipelineSnippet = [
-
-
-  "",
-
-
-
+  const pipelineSnippet = ["# Use a pipeline as a high-level helper", "from transformers import pipeline", ""];
+
+  if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
+    pipelineSnippet.push("messages = [", ' {"role": "user", "content": "Who are you?"},', "]");
+  }
+  pipelineSnippet.push(`pipe = pipeline("${model.pipeline_tag}", model="${model.id}"` + remote_code_snippet + ")");
+  if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
+    pipelineSnippet.push("pipe(messages)");
+  }
+
+  return [pipelineSnippet.join("\n"), autoSnippet];
 }
 return [autoSnippet];
};
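The rewritten snippet builder now wraps the generated pipeline(...) call with a messages list and a pipe(messages) call when the model is conversational. A simplified sketch of that logic, with the tag/chat-template check collapsed into a plain boolean and an example model id (both are assumptions for illustration, not taken from the diff):

```ts
// Simplified mirror of the new snippet logic; `isConversational` stands in for the
// `model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template` check.
const model = { id: "meta-llama/Llama-2-7b-chat-hf", pipeline_tag: "text-generation", isConversational: true };

const pipelineSnippet = ["# Use a pipeline as a high-level helper", "from transformers import pipeline", ""];
if (model.isConversational) {
  pipelineSnippet.push("messages = [", '    {"role": "user", "content": "Who are you?"},', "]");
}
pipelineSnippet.push(`pipe = pipeline("${model.pipeline_tag}", model="${model.id}")`);
if (model.isConversational) {
  pipelineSnippet.push("pipe(messages)");
}

// Prints the Python snippet that would be shown on the model page.
console.log(pipelineSnippet.join("\n"));
```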
package/src/tasks/automatic-speech-recognition/about.md
CHANGED

@@ -18,7 +18,7 @@ The use of Multilingual ASR has become popular, the idea of maintaining just a s
 
 ## Inference
 
-The Hub contains over [
+The Hub contains over [17,000 ASR models](https://huggingface.co/models?pipeline_tag=automatic-speech-recognition&sort=downloads) that you can test right away in your browser using the model page widgets. You can also use any model as a service using the Serverless Inference API. We also support libraries such as [transformers](https://huggingface.co/models?library=transformers&pipeline_tag=automatic-speech-recognition&sort=downloads), [speechbrain](https://huggingface.co/models?library=speechbrain&pipeline_tag=automatic-speech-recognition&sort=downloads), [NeMo](https://huggingface.co/models?pipeline_tag=automatic-speech-recognition&library=nemo&sort=downloads) and [espnet](https://huggingface.co/models?library=espnet&pipeline_tag=automatic-speech-recognition&sort=downloads) via the Serverless Inference API. Here's a simple code snippet to run inference:
 
 ```python
 import json
@@ -36,20 +36,7 @@ def query(filename):
 data = query("sample1.flac")
 ```
 
-You can also use
-
-```python
-from transformers import pipeline
-
-with open("sample.flac", "rb") as f:
-    data = f.read()
-
-pipe = pipeline("automatic-speech-recognition", "openai/whisper-large-v2")
-pipe("sample.flac")
-# {'text': "GOING ALONG SLUSHY COUNTRY ROADS AND SPEAKING TO DAMP AUDIENCES IN DRAUGHTY SCHOOL ROOMS DAY AFTER DAY FOR A FORTNIGHT HE'LL HAVE TO PUT IN AN APPEARANCE AT SOME PLACE OF WORSHIP ON SUNDAY MORNING AND HE CAN COME TO US IMMEDIATELY AFTERWARDS"}
-```
-
-You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to transcribe text with javascript using models on Hugging Face Hub.
+You can also use [huggingface.js](https://github.com/huggingface/huggingface.js), the JavaScript client, to transcribe models with the Inference API.
 
 ```javascript
 import { HfInference } from "@huggingface/inference";
@@ -57,10 +44,23 @@ import { HfInference } from "@huggingface/inference";
 const inference = new HfInference(HF_TOKEN);
 await inference.automaticSpeechRecognition({
   data: await (await fetch("sample.flac")).blob(),
-  model: "openai/whisper-large-
+  model: "openai/whisper-large-v3",
 });
 ```
 
+For transformers compatible models like Whisper, Wav2Vec2, HuBERT, etc. You can also run inference in Python using transformers as follows:
+
+```python
+# pip install --upgrade transformers
+
+from transformers import pipeline
+
+pipe = pipeline("automatic-speech-recognition", "openai/whisper-large-v3")
+
+pipe("sample.flac")
+# {'text': "GOING ALONG SLUSHY COUNTRY ROADS AND SPEAKING TO DAMP AUDIENCES IN DRAUGHTY SCHOOL ROOMS DAY AFTER DAY FOR A FORTNIGHT HE'LL HAVE TO PUT IN AN APPEARANCE AT SOME PLACE OF WORSHIP ON SUNDAY MORNING AND HE CAN COME TO US IMMEDIATELY AFTERWARDS"}
+```
+
 ## Solving ASR for your own data
 
 We have some great news! You can fine-tune (transfer learning) a foundational speech model on a specific language without tonnes of data. Pretrained models such as Whisper, Wav2Vec2-MMS and HuBERT exist. [OpenAI's Whisper model](https://huggingface.co/openai/whisper-large-v3) is a large multilingual model trained on 100+ languages and with 4 Million hours of speech.
package/src/tasks/automatic-speech-recognition/data.ts
CHANGED

@@ -3,16 +3,16 @@ import type { TaskDataCustom } from "..";
 const taskData: TaskDataCustom = {
   datasets: [
     {
-      description: "
-      id: "mozilla-foundation/
+      description: "31,175 hours of multilingual audio-text dataset in 108 languages.",
+      id: "mozilla-foundation/common_voice_17_0",
     },
     {
       description: "An English dataset with 1,000 hours of data.",
       id: "librispeech_asr",
     },
     {
-      description: "
-      id: "
+      description: "A multi-lingual audio dataset with 370K hours of audio.",
+      id: "espnet/yodas",
     },
   ],
   demo: {
@@ -47,12 +47,12 @@ const taskData: TaskDataCustom = {
       id: "openai/whisper-large-v3",
     },
     {
-      description: "A good generic
-      id: "facebook/
+      description: "A good generic speech model by MetaAI for fine-tuning.",
+      id: "facebook/w2v-bert-2.0",
     },
     {
       description: "An end-to-end model that performs ASR and Speech Translation by MetaAI.",
-      id: "facebook/
+      id: "facebook/seamless-m4t-v2-large",
     },
   ],
   spaces: [
package/src/tasks/text-to-speech/about.md
CHANGED

@@ -58,8 +58,6 @@ await inference.textToSpeech({
 
 - [Hugging Face Audio Course](https://huggingface.co/learn/audio-course/chapter6/introduction)
 - [ML for Audio Study Group - Text to Speech Deep Dive](https://www.youtube.com/watch?v=aLBedWj-5CQ)
-- [An introduction to SpeechT5, a multi-purpose speech recognition and synthesis model](https://huggingface.co/blog/speecht5).
-- [A guide on Fine-tuning Whisper For Multilingual ASR with 🤗Transformers](https://huggingface.co/blog/fine-tune-whisper)
 - [Speech Synthesis, Recognition, and More With SpeechT5](https://huggingface.co/blog/speecht5)
 - [Optimizing a Text-To-Speech model using 🤗 Transformers](https://huggingface.co/blog/optimizing-bark)
--
+- [Train your own TTS models with Parler-TTS](https://github.com/huggingface/parler-tts)
package/src/tasks/text-to-speech/data.ts
CHANGED

@@ -4,8 +4,8 @@ const taskData: TaskDataCustom = {
   canonicalId: "text-to-audio",
   datasets: [
     {
-      description: "
-      id: "
+      description: "10K hours of multi-speaker English dataset.",
+      id: "parler-tts/mls_eng_10k",
     },
     {
       description: "Multi-speaker English dataset.",
@@ -43,8 +43,8 @@ const taskData: TaskDataCustom = {
       id: "facebook/mms-tts",
     },
     {
-      description: "
-      id: "
+      description: "A prompt based, powerful TTS model.",
+      id: "parler-tts/parler_tts_mini_v0.1",
     },
   ],
   spaces: [
@@ -57,8 +57,8 @@ const taskData: TaskDataCustom = {
       id: "coqui/xtts",
     },
     {
-      description: "An application that synthesizes speech for
-      id: "
+      description: "An application that synthesizes speech for diverse speaker prompts.",
+      id: "parler-tts/parler_tts_mini",
     },
   ],
   summary: