modelpedia 0.0.2 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (136)
  1. package/README.md +141 -0
  2. package/dist/index.cjs +1 -101
  3. package/dist/index.d.cts +10 -2
  4. package/dist/index.d.mts +10 -2
  5. package/dist/index.mjs +1 -92
  6. package/dist/providers/alibaba.cjs +4 -835
  7. package/dist/providers/alibaba.d.cts +1 -1
  8. package/dist/providers/alibaba.d.mts +1 -1
  9. package/dist/providers/alibaba.mjs +4 -833
  10. package/dist/providers/amazon.cjs +4 -1484
  11. package/dist/providers/amazon.d.cts +1 -1
  12. package/dist/providers/amazon.d.mts +1 -1
  13. package/dist/providers/amazon.mjs +4 -1482
  14. package/dist/providers/anthropic.cjs +3 -698
  15. package/dist/providers/anthropic.d.cts +1 -1
  16. package/dist/providers/anthropic.d.mts +1 -1
  17. package/dist/providers/anthropic.mjs +3 -696
  18. package/dist/providers/azure.cjs +6 -1491
  19. package/dist/providers/azure.d.cts +1 -1
  20. package/dist/providers/azure.d.mts +1 -1
  21. package/dist/providers/azure.mjs +6 -1489
  22. package/dist/providers/baseten.cjs +3 -279
  23. package/dist/providers/baseten.d.cts +1 -1
  24. package/dist/providers/baseten.d.mts +1 -1
  25. package/dist/providers/baseten.mjs +3 -277
  26. package/dist/providers/cerebras.cjs +4 -145
  27. package/dist/providers/cerebras.d.cts +1 -1
  28. package/dist/providers/cerebras.d.mts +1 -1
  29. package/dist/providers/cerebras.mjs +4 -143
  30. package/dist/providers/cloudflare-ai-gateway.cjs +4 -0
  31. package/dist/providers/cloudflare-ai-gateway.d.cts +7 -0
  32. package/dist/providers/cloudflare-ai-gateway.d.mts +7 -0
  33. package/dist/providers/cloudflare-ai-gateway.mjs +4 -0
  34. package/dist/providers/cloudflare-workers-ai.cjs +4 -0
  35. package/dist/providers/cloudflare-workers-ai.d.cts +7 -0
  36. package/dist/providers/cloudflare-workers-ai.d.mts +7 -0
  37. package/dist/providers/cloudflare-workers-ai.mjs +4 -0
  38. package/dist/providers/cohere.cjs +5 -525
  39. package/dist/providers/cohere.d.cts +1 -1
  40. package/dist/providers/cohere.d.mts +1 -1
  41. package/dist/providers/cohere.mjs +5 -523
  42. package/dist/providers/cursor.cjs +1 -856
  43. package/dist/providers/cursor.d.cts +1 -1
  44. package/dist/providers/cursor.d.mts +1 -1
  45. package/dist/providers/cursor.mjs +1 -854
  46. package/dist/providers/deepseek.cjs +3 -64
  47. package/dist/providers/deepseek.d.cts +1 -1
  48. package/dist/providers/deepseek.d.mts +1 -1
  49. package/dist/providers/deepseek.mjs +3 -62
  50. package/dist/providers/fireworks.cjs +3 -3794
  51. package/dist/providers/fireworks.d.cts +1 -1
  52. package/dist/providers/fireworks.d.mts +1 -1
  53. package/dist/providers/fireworks.mjs +3 -3792
  54. package/dist/providers/google.cjs +6 -972
  55. package/dist/providers/google.d.cts +1 -1
  56. package/dist/providers/google.d.mts +1 -1
  57. package/dist/providers/google.mjs +6 -970
  58. package/dist/providers/groq.cjs +3 -353
  59. package/dist/providers/groq.d.cts +1 -1
  60. package/dist/providers/groq.d.mts +1 -1
  61. package/dist/providers/groq.mjs +3 -351
  62. package/dist/providers/huggingface.cjs +3 -1022
  63. package/dist/providers/huggingface.d.cts +1 -1
  64. package/dist/providers/huggingface.d.mts +1 -1
  65. package/dist/providers/huggingface.mjs +3 -1020
  66. package/dist/providers/meta.cjs +3 -481
  67. package/dist/providers/meta.d.cts +1 -1
  68. package/dist/providers/meta.d.mts +1 -1
  69. package/dist/providers/meta.mjs +3 -479
  70. package/dist/providers/minimax.cjs +3 -156
  71. package/dist/providers/minimax.d.cts +1 -1
  72. package/dist/providers/minimax.d.mts +1 -1
  73. package/dist/providers/minimax.mjs +3 -154
  74. package/dist/providers/mistral.cjs +3 -1594
  75. package/dist/providers/mistral.d.cts +1 -1
  76. package/dist/providers/mistral.d.mts +1 -1
  77. package/dist/providers/mistral.mjs +3 -1592
  78. package/dist/providers/moonshot.cjs +3 -138
  79. package/dist/providers/moonshot.d.cts +1 -1
  80. package/dist/providers/moonshot.d.mts +1 -1
  81. package/dist/providers/moonshot.mjs +3 -136
  82. package/dist/providers/nvidia.cjs +3 -2040
  83. package/dist/providers/nvidia.d.cts +1 -1
  84. package/dist/providers/nvidia.d.mts +1 -1
  85. package/dist/providers/nvidia.mjs +3 -2038
  86. package/dist/providers/ollama.cjs +3 -326
  87. package/dist/providers/ollama.d.cts +1 -1
  88. package/dist/providers/ollama.d.mts +1 -1
  89. package/dist/providers/ollama.mjs +3 -324
  90. package/dist/providers/openai.cjs +3 -9079
  91. package/dist/providers/openai.d.cts +1 -1
  92. package/dist/providers/openai.d.mts +1 -1
  93. package/dist/providers/openai.mjs +3 -9077
  94. package/dist/providers/opencode.cjs +3 -367
  95. package/dist/providers/opencode.d.cts +1 -1
  96. package/dist/providers/opencode.d.mts +1 -1
  97. package/dist/providers/opencode.mjs +3 -365
  98. package/dist/providers/openrouter.cjs +21 -7835
  99. package/dist/providers/openrouter.d.cts +1 -1
  100. package/dist/providers/openrouter.d.mts +1 -1
  101. package/dist/providers/openrouter.mjs +21 -7833
  102. package/dist/providers/perplexity.cjs +3 -311
  103. package/dist/providers/perplexity.d.cts +1 -1
  104. package/dist/providers/perplexity.d.mts +1 -1
  105. package/dist/providers/perplexity.mjs +3 -309
  106. package/dist/providers/qwen.cjs +3 -835
  107. package/dist/providers/qwen.d.cts +1 -1
  108. package/dist/providers/qwen.d.mts +1 -1
  109. package/dist/providers/qwen.mjs +3 -833
  110. package/dist/providers/together.cjs +4 -360
  111. package/dist/providers/together.d.cts +1 -1
  112. package/dist/providers/together.d.mts +1 -1
  113. package/dist/providers/together.mjs +4 -358
  114. package/dist/providers/vercel.cjs +3 -4037
  115. package/dist/providers/vercel.d.cts +1 -1
  116. package/dist/providers/vercel.d.mts +1 -1
  117. package/dist/providers/vercel.mjs +3 -4035
  118. package/dist/providers/vertex.cjs +10 -1072
  119. package/dist/providers/vertex.d.cts +1 -1
  120. package/dist/providers/vertex.d.mts +1 -1
  121. package/dist/providers/vertex.mjs +10 -1070
  122. package/dist/providers/xai.cjs +3 -301
  123. package/dist/providers/xai.d.cts +1 -1
  124. package/dist/providers/xai.d.mts +1 -1
  125. package/dist/providers/xai.mjs +3 -299
  126. package/dist/providers/zai.cjs +3 -243
  127. package/dist/providers/zai.d.cts +1 -1
  128. package/dist/providers/zai.d.mts +1 -1
  129. package/dist/providers/zai.mjs +3 -241
  130. package/dist/types.d.cts +80 -8
  131. package/dist/types.d.mts +80 -8
  132. package/package.json +3 -3
  133. package/dist/providers/cloudflare.cjs +0 -826
  134. package/dist/providers/cloudflare.d.cts +0 -7
  135. package/dist/providers/cloudflare.d.mts +0 -7
  136. package/dist/providers/cloudflare.mjs +0 -824
@@ -1,1020 +1,3 @@
1
- //#region src/providers/huggingface.ts
2
- const provider = {
3
- "id": "huggingface",
4
- "name": "Hugging Face",
5
- "region": "US",
6
- "url": "https://huggingface.co",
7
- "api_url": "https://router.huggingface.co/v1",
8
- "docs_url": "https://huggingface.co/docs/api-inference",
9
- "pricing_url": "https://huggingface.co/pricing",
10
- "icon": "<svg xmlns=\"http://www.w3.org/2000/svg\" viewBox=\"0 0 24 24\" fill=\"currentColor\" fill-rule=\"evenodd\">\n <path d=\"M16.781 3.277c2.997 1.704 4.844 4.851 4.844 8.258 0 .995-.155 1.955-.443 2.857a1.332 1.332 0 011.125.4 1.41 1.41 0 01.2 1.723c.204.165.352.385.428.632l.017.062c.06.222.12.69-.2 1.166.244.37.279.836.093 1.236-.255.57-.893 1.018-2.128 1.5l-.202.078-.131.048c-.478.173-.89.295-1.061.345l-.086.024c-.89.243-1.808.375-2.732.394-1.32 0-2.3-.36-2.923-1.067a9.852 9.852 0 01-3.18.018C9.778 21.647 8.802 22 7.494 22a11.249 11.249 0 01-2.541-.343l-.221-.06-.273-.08a16.574 16.574 0 01-1.175-.405c-1.237-.483-1.875-.93-2.13-1.501-.186-.4-.151-.867.093-1.236a1.42 1.42 0 01-.2-1.166c.069-.273.226-.516.447-.694a1.41 1.41 0 01.2-1.722c.233-.248.557-.391.917-.407l.078-.001a9.385 9.385 0 01-.44-2.85c0-3.407 1.847-6.554 4.844-8.258a9.822 9.822 0 019.687 0zM4.188 14.758c.125.687 2.357 2.35 2.14 2.707-.19.315-.796-.239-.948-.386l-.041-.04-.168-.147c-.561-.479-2.304-1.9-2.74-1.432-.43.46.119.859 1.055 1.42l.784.467.136.083c1.045.643 1.12.84.95 1.113-.188.295-3.07-2.1-3.34-1.083-.27 1.011 2.942 1.304 2.744 2.006-.2.7-2.265-1.324-2.685-.537-.425.79 2.913 1.718 2.94 1.725l.16.04.175.042c1.227.284 3.565.65 4.435-.604.673-.973.64-1.709-.248-2.61l-.057-.057c-.945-.928-1.495-2.288-1.495-2.288l-.017-.058-.025-.072c-.082-.22-.284-.639-.63-.584-.46.073-.798 1.21.12 1.933l.05.038c.977.721-.195 1.21-.573.534l-.058-.104-.143-.25c-.463-.799-1.282-2.111-1.739-2.397-.532-.332-.907-.148-.782.541zm14.842-.541c-.533.335-1.563 2.074-1.94 2.751a.613.613 0 01-.687.302.436.436 0 01-.176-.098.303.303 0 01-.049-.06l-.014-.028-.008-.02-.007-.019-.003-.013-.003-.017a.289.289 0 01-.004-.048c0-.12.071-.266.25-.427.026-.024.054-.047.084-.07l.047-.036c.022-.016.043-.032.063-.049.883-.71.573-1.81.131-1.917l-.031-.006-.056-.004a.368.368 0 00-.062.006l-.028.005-.042.014-.039.017-.028.015-.028.019-.036.027-.023.02c-.173.158-.273.428-.31.542l-.016.054s-.53 1.309-1.439 
2.234l-.054.054c-.365.358-.596.69-.702 1.018-.143.437-.066.868.21 1.353.055.097.117.195.187.296.882 1.275 3.282.876 4.494.59l.286-.07.25-.074c.276-.084.736-.233 1.2-.42l.188-.077.065-.028.064-.028.124-.056.081-.038c.529-.252.964-.543.994-.827l.001-.036a.299.299 0 00-.037-.139c-.094-.176-.271-.212-.491-.168l-.045.01c-.044.01-.09.024-.136.04l-.097.035-.054.022c-.559.23-1.238.705-1.607.745h.006a.452.452 0 01-.05.003h-.024l-.024-.003-.023-.005c-.068-.016-.116-.06-.14-.142a.22.22 0 01-.005-.1c.062-.345.958-.595 1.713-.91l.066-.028c.528-.224.97-.483.985-.832v-.04a.47.47 0 00-.016-.098c-.048-.18-.175-.251-.36-.251-.785 0-2.55 1.36-2.92 1.36-.025 0-.048-.007-.058-.024a.6.6 0 01-.046-.088c-.1-.238.068-.462 1.06-1.066l.209-.126c.538-.32 1.01-.588 1.341-.831.29-.212.475-.406.503-.6l.003-.028c.008-.113-.038-.227-.147-.344a.266.266 0 00-.07-.054l-.034-.015-.013-.005a.403.403 0 00-.13-.02c-.162 0-.369.07-.595.18-.637.313-1.431.952-1.826 1.285l-.249.215-.033.033c-.08.078-.288.27-.493.386l-.071.037-.041.019a.535.535 0 01-.122.036h.005a.346.346 0 01-.031.003l.01-.001-.013.001c-.079.005-.145-.021-.19-.095a.113.113 0 01-.014-.065c.027-.465 2.034-1.991 2.152-2.642l.009-.048c.1-.65-.271-.817-.791-.493zM11.938 2.984c-4.798 0-8.688 3.829-8.688 8.55 0 .692.083 1.364.24 2.008l.008-.009c.252-.298.612-.46 1.017-.46.355.008.699.117.993.312.22.14.465.384.715.694.261-.372.69-.598 1.15-.605.852 0 1.367.728 1.562 1.383l.047.105.06.127c.192.396.595 1.139 1.143 1.68 1.06 1.04 1.324 2.115.8 3.266a8.865 8.865 0 002.024-.014c-.505-1.12-.26-2.17.74-3.186l.066-.066c.695-.684 1.157-1.69 1.252-1.912.195-.655.708-1.383 1.56-1.383.46.007.889.233 1.15.605.25-.31.495-.553.718-.694a1.87 1.87 0 01.99-.312c.357 0 .682.126.925.36.14-.61.215-1.245.215-1.898 0-4.722-3.89-8.55-8.687-8.55zm1.857 8.926l.439-.212c.553-.264.89-.383.89.152 0 1.093-.771 3.208-3.155 3.262h-.184c-2.325-.052-3.116-2.06-3.156-3.175l-.001-.087c0-1.107 1.452.586 3.25.586.716 0 1.379-.272 1.917-.526zm4.017-3.143c.45 0 .813.358.813.8 0 
.441-.364.8-.813.8a.806.806 0 01-.812-.8c0-.442.364-.8.812-.8zm-11.624 0c.448 0 .812.358.812.8 0 .441-.364.8-.812.8a.806.806 0 01-.813-.8c0-.442.364-.8.813-.8zm7.79-.841c.32-.384.846-.54 1.33-.394.483.146.83.564.878 1.06.048.495-.212.97-.659 1.203-.322.168-.447-.477-.767-.585l.002-.003c-.287-.098-.772.362-.925.079a1.215 1.215 0 01.14-1.36zm-4.323 0c.322.384.377.92.14 1.36-.152.283-.64-.177-.925-.079l.003.003c-.108.036-.194.134-.273.24l-.118.165c-.11.15-.22.262-.377.18a1.226 1.226 0 01-.658-1.204c.048-.495.395-.913.878-1.059a1.262 1.262 0 011.33.394z\"/>\n</svg>",
11
- "models": [
12
- {
13
- "id": "allenai/Olmo-3-7B-Instruct",
14
- "name": "Olmo-3-7B-Instruct",
15
- "created_by": "allenai",
16
- "source": "official",
17
- "last_updated": "2026-03-21",
18
- "capabilities": { "streaming": true }
19
- },
20
- {
21
- "id": "allenai/Olmo-3.1-32B-Instruct",
22
- "name": "Olmo-3.1-32B-Instruct",
23
- "created_by": "allenai",
24
- "source": "official",
25
- "last_updated": "2026-03-21",
26
- "capabilities": { "streaming": true }
27
- },
28
- {
29
- "id": "argilla/Llama-3.2-1B-Instruct-APIGen-FC-v0.1",
30
- "name": "Llama-3.2-1B-Instruct-APIGen-FC-v0.1",
31
- "created_by": "argilla",
32
- "source": "official",
33
- "last_updated": "2026-03-21",
34
- "capabilities": { "streaming": true }
35
- },
36
- {
37
- "id": "argilla-warehouse/Llama-3.2-1B-Instruct-v2-FC",
38
- "name": "Llama-3.2-1B-Instruct-v2-FC",
39
- "created_by": "argilla-warehouse",
40
- "source": "official",
41
- "last_updated": "2026-03-21",
42
- "capabilities": { "streaming": true }
43
- },
44
- {
45
- "id": "DeepMount00/Llama-3-8b-Ita",
46
- "name": "Llama-3-8b-Ita",
47
- "created_by": "deepmount00",
48
- "source": "official",
49
- "last_updated": "2026-03-21",
50
- "capabilities": { "streaming": true }
51
- },
52
- {
53
- "id": "deepseek-ai/DeepSeek-R1-0528-Qwen3-8B",
54
- "name": "DeepSeek-R1-0528-Qwen3-8B",
55
- "created_by": "deepseek",
56
- "source": "official",
57
- "last_updated": "2026-03-21",
58
- "capabilities": { "streaming": true }
59
- },
60
- {
61
- "id": "Efficient-Large-Model/gemma-2-2b-it",
62
- "name": "gemma-2-2b-it",
63
- "created_by": "efficient-large-model",
64
- "source": "official",
65
- "last_updated": "2026-03-21",
66
- "family": "gemma-2",
67
- "capabilities": { "streaming": true }
68
- },
69
- {
70
- "id": "EssentialAI/rnj-1-instruct",
71
- "name": "rnj-1-instruct",
72
- "created_by": "essentialai",
73
- "source": "official",
74
- "last_updated": "2026-03-21",
75
- "capabilities": { "streaming": true }
76
- },
77
- {
78
- "id": "failspy/Meta-Llama-3-8B-Instruct-abliterated-v3",
79
- "name": "Meta-Llama-3-8B-Instruct-abliterated-v3",
80
- "created_by": "failspy",
81
- "source": "official",
82
- "last_updated": "2026-03-21",
83
- "capabilities": { "streaming": true }
84
- },
85
- {
86
- "id": "Featherless-Chat-Models/Mistral-7B-Instruct-v0.2",
87
- "name": "Mistral-7B-Instruct-v0.2",
88
- "created_by": "featherless-chat-models",
89
- "source": "official",
90
- "last_updated": "2026-03-21",
91
- "capabilities": { "streaming": true }
92
- },
93
- {
94
- "id": "FlagAlpha/Llama3-Chinese-8B-Instruct",
95
- "name": "Llama3-Chinese-8B-Instruct",
96
- "created_by": "flagalpha",
97
- "source": "official",
98
- "last_updated": "2026-03-21",
99
- "capabilities": { "streaming": true }
100
- },
101
- {
102
- "id": "google/gemma-2-2b-jpn-it",
103
- "name": "gemma-2-2b-jpn-it",
104
- "created_by": "google",
105
- "source": "official",
106
- "last_updated": "2026-03-21",
107
- "family": "gemma-2",
108
- "capabilities": { "streaming": true }
109
- },
110
- {
111
- "id": "google/gemma-2-9b-it",
112
- "name": "gemma-2-9b-it",
113
- "created_by": "google",
114
- "source": "official",
115
- "last_updated": "2026-03-21",
116
- "family": "gemma-2",
117
- "capabilities": { "streaming": true }
118
- },
119
- {
120
- "id": "GraySwanAI/Llama-3-8B-Instruct-RR",
121
- "name": "Llama-3-8B-Instruct-RR",
122
- "created_by": "grayswanai",
123
- "source": "official",
124
- "last_updated": "2026-03-21",
125
- "capabilities": { "streaming": true }
126
- },
127
- {
128
- "id": "GritLM/GritLM-7B",
129
- "name": "GritLM-7B",
130
- "created_by": "gritlm",
131
- "source": "official",
132
- "last_updated": "2026-03-21",
133
- "capabilities": { "streaming": true }
134
- },
135
- {
136
- "id": "huihui-ai/Qwen2.5-72B-Instruct-abliterated",
137
- "name": "Qwen2.5-72B-Instruct-abliterated",
138
- "created_by": "huihui-ai",
139
- "source": "official",
140
- "last_updated": "2026-03-21",
141
- "capabilities": { "streaming": true }
142
- },
143
- {
144
- "id": "instruction-pretrain/finance-Llama3-8B",
145
- "name": "finance-Llama3-8B",
146
- "created_by": "instruction-pretrain",
147
- "source": "official",
148
- "last_updated": "2026-03-21",
149
- "capabilities": { "streaming": true }
150
- },
151
- {
152
- "id": "Intel/neural-chat-7b-v3-3",
153
- "name": "neural-chat-7b-v3-3",
154
- "created_by": "intel",
155
- "source": "official",
156
- "last_updated": "2026-03-21",
157
- "capabilities": { "streaming": true }
158
- },
159
- {
160
- "id": "invalid-coder/Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp",
161
- "name": "Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp",
162
- "created_by": "invalid-coder",
163
- "source": "official",
164
- "last_updated": "2026-03-21",
165
- "capabilities": { "streaming": true }
166
- },
167
- {
168
- "id": "m8than/Mistral-Nemo-Instruct-2407-lenient-chatfix",
169
- "name": "Mistral-Nemo-Instruct-2407-lenient-chatfix",
170
- "created_by": "m8than",
171
- "source": "official",
172
- "last_updated": "2026-03-21",
173
- "capabilities": { "streaming": true }
174
- },
175
- {
176
- "id": "meta-llama/Llama-2-13b-chat-hf",
177
- "name": "Llama-2-13b-chat-hf",
178
- "created_by": "meta",
179
- "source": "official",
180
- "last_updated": "2026-03-21",
181
- "capabilities": { "streaming": true }
182
- },
183
- {
184
- "id": "meta-llama/Llama-3.1-70B-Instruct",
185
- "name": "Llama-3.1-70B-Instruct",
186
- "created_by": "meta",
187
- "source": "official",
188
- "last_updated": "2026-03-21",
189
- "capabilities": { "streaming": true }
190
- },
191
- {
192
- "id": "meta-llama/Llama-3.1-8B-Instruct",
193
- "name": "Llama-3.1-8B-Instruct",
194
- "created_by": "meta",
195
- "source": "official",
196
- "last_updated": "2026-03-21",
197
- "capabilities": { "streaming": true }
198
- },
199
- {
200
- "id": "meta-llama/Llama-3.2-1B-Instruct",
201
- "name": "Llama-3.2-1B-Instruct",
202
- "created_by": "meta",
203
- "source": "official",
204
- "last_updated": "2026-03-21",
205
- "capabilities": { "streaming": true }
206
- },
207
- {
208
- "id": "meta-llama/Llama-3.3-70B-Instruct",
209
- "name": "Llama-3.3-70B-Instruct",
210
- "created_by": "meta",
211
- "source": "official",
212
- "last_updated": "2026-03-21",
213
- "capabilities": { "streaming": true }
214
- },
215
- {
216
- "id": "meta-llama/Meta-Llama-3-70B-Instruct",
217
- "name": "Meta-Llama-3-70B-Instruct",
218
- "created_by": "meta",
219
- "source": "official",
220
- "last_updated": "2026-03-21",
221
- "capabilities": { "streaming": true }
222
- },
223
- {
224
- "id": "meta-llama/Meta-Llama-3-8B-Instruct",
225
- "name": "Meta-Llama-3-8B-Instruct",
226
- "created_by": "meta",
227
- "source": "official",
228
- "last_updated": "2026-03-21",
229
- "capabilities": { "streaming": true }
230
- },
231
- {
232
- "id": "mistralai/Mistral-7B-Instruct-v0.2",
233
- "name": "Mistral-7B-Instruct-v0.2",
234
- "created_by": "mistral",
235
- "source": "official",
236
- "last_updated": "2026-03-21",
237
- "capabilities": { "streaming": true }
238
- },
239
- {
240
- "id": "mlabonne/NeuralDaredevil-8B-abliterated",
241
- "name": "NeuralDaredevil-8B-abliterated",
242
- "created_by": "mlabonne",
243
- "source": "official",
244
- "last_updated": "2026-03-21",
245
- "capabilities": { "streaming": true }
246
- },
247
- {
248
- "id": "moonshotai/Kimi-K2-Instruct-0905",
249
- "name": "Kimi-K2-Instruct-0905",
250
- "created_by": "moonshot",
251
- "source": "official",
252
- "last_updated": "2026-03-21",
253
- "capabilities": { "streaming": true }
254
- },
255
- {
256
- "id": "moonshotai/Kimi-K2-Instruct",
257
- "name": "Kimi-K2-Instruct",
258
- "created_by": "moonshot",
259
- "source": "official",
260
- "last_updated": "2026-03-21",
261
- "capabilities": { "streaming": true }
262
- },
263
- {
264
- "id": "moonshotai/Kimi-K2-Thinking",
265
- "name": "Kimi-K2-Thinking",
266
- "created_by": "moonshot",
267
- "source": "official",
268
- "last_updated": "2026-03-21",
269
- "capabilities": { "streaming": true }
270
- },
271
- {
272
- "id": "NousResearch/Meta-Llama-3-70B-Instruct",
273
- "name": "Meta-Llama-3-70B-Instruct",
274
- "created_by": "nousresearch",
275
- "source": "official",
276
- "last_updated": "2026-03-21",
277
- "capabilities": { "streaming": true }
278
- },
279
- {
280
- "id": "NousResearch/Meta-Llama-3-8B-Instruct",
281
- "name": "Meta-Llama-3-8B-Instruct",
282
- "created_by": "nousresearch",
283
- "source": "official",
284
- "last_updated": "2026-03-21",
285
- "capabilities": { "streaming": true }
286
- },
287
- {
288
- "id": "NousResearch/Meta-Llama-3.1-8B-Instruct",
289
- "name": "Meta-Llama-3.1-8B-Instruct",
290
- "created_by": "nousresearch",
291
- "source": "official",
292
- "last_updated": "2026-03-21",
293
- "capabilities": { "streaming": true }
294
- },
295
- {
296
- "id": "nvidia/Llama-3.1-8B-Instruct-FP8",
297
- "name": "Llama-3.1-8B-Instruct-FP8",
298
- "created_by": "nvidia",
299
- "source": "official",
300
- "last_updated": "2026-03-21",
301
- "capabilities": { "streaming": true }
302
- },
303
- {
304
- "id": "openai/gpt-oss-120b",
305
- "name": "gpt-oss-120b",
306
- "created_by": "openai",
307
- "source": "official",
308
- "last_updated": "2026-03-21",
309
- "family": "gpt-oss",
310
- "capabilities": { "streaming": true }
311
- },
312
- {
313
- "id": "openai/gpt-oss-20b",
314
- "name": "gpt-oss-20b",
315
- "created_by": "openai",
316
- "source": "official",
317
- "last_updated": "2026-03-21",
318
- "family": "gpt-oss",
319
- "capabilities": { "streaming": true }
320
- },
321
- {
322
- "id": "openai/gpt-oss-safeguard-20b",
323
- "name": "gpt-oss-safeguard-20b",
324
- "created_by": "openai",
325
- "source": "official",
326
- "last_updated": "2026-03-21",
327
- "family": "gpt-oss",
328
- "capabilities": { "streaming": true },
329
- "model_type": "moderation"
330
- },
331
- {
332
- "id": "openchat/openchat-3.6-8b-20240522",
333
- "name": "openchat-3.6-8b-20240522",
334
- "created_by": "openchat",
335
- "source": "official",
336
- "last_updated": "2026-03-21",
337
- "alias": "openchat/openchat-3.6-8b",
338
- "capabilities": { "streaming": true }
339
- },
340
- {
341
- "id": "openchat/openchat-3.6-8b",
342
- "name": "openchat-3.6-8b-20240522",
343
- "created_by": "openchat",
344
- "source": "official",
345
- "last_updated": "2026-03-21",
346
- "capabilities": { "streaming": true },
347
- "snapshots": ["openchat/openchat-3.6-8b-20240522"]
348
- },
349
- {
350
- "id": "Orion-zhen/Qwen2.5-7B-Instruct-Uncensored",
351
- "name": "Qwen2.5-7B-Instruct-Uncensored",
352
- "created_by": "orion-zhen",
353
- "source": "official",
354
- "last_updated": "2026-03-21",
355
- "capabilities": { "streaming": true }
356
- },
357
- {
358
- "id": "PatronusAI/Llama-3-Patronus-Lynx-8B-Instruct-v1.1",
359
- "name": "Llama-3-Patronus-Lynx-8B-Instruct-v1.1",
360
- "created_by": "patronusai",
361
- "source": "official",
362
- "last_updated": "2026-03-21",
363
- "capabilities": { "streaming": true }
364
- },
365
- {
366
- "id": "Qwen/Qwen1.5-1.8B-Chat",
367
- "name": "Qwen1.5-1.8B-Chat",
368
- "created_by": "qwen",
369
- "source": "official",
370
- "last_updated": "2026-03-21",
371
- "capabilities": { "streaming": true }
372
- },
373
- {
374
- "id": "Qwen/Qwen2-1.5B-Instruct",
375
- "name": "Qwen2-1.5B-Instruct",
376
- "created_by": "qwen",
377
- "source": "official",
378
- "last_updated": "2026-03-21",
379
- "capabilities": { "streaming": true }
380
- },
381
- {
382
- "id": "Qwen/Qwen2-72B-Instruct",
383
- "name": "Qwen2-72B-Instruct",
384
- "created_by": "qwen",
385
- "source": "official",
386
- "last_updated": "2026-03-21",
387
- "capabilities": { "streaming": true }
388
- },
389
- {
390
- "id": "Qwen/Qwen2-7B-Instruct",
391
- "name": "Qwen2-7B-Instruct",
392
- "created_by": "qwen",
393
- "source": "official",
394
- "last_updated": "2026-03-21",
395
- "capabilities": { "streaming": true }
396
- },
397
- {
398
- "id": "Qwen/Qwen2.5-1.5B-Instruct",
399
- "name": "Qwen2.5-1.5B-Instruct",
400
- "created_by": "qwen",
401
- "source": "official",
402
- "last_updated": "2026-03-21",
403
- "capabilities": { "streaming": true }
404
- },
405
- {
406
- "id": "Qwen/Qwen2.5-14B-Instruct-1M",
407
- "name": "Qwen2.5-14B-Instruct-1M",
408
- "created_by": "qwen",
409
- "source": "official",
410
- "last_updated": "2026-03-21",
411
- "capabilities": { "streaming": true }
412
- },
413
- {
414
- "id": "Qwen/Qwen2.5-14B-Instruct",
415
- "name": "Qwen2.5-14B-Instruct",
416
- "created_by": "qwen",
417
- "source": "official",
418
- "last_updated": "2026-03-21",
419
- "capabilities": { "streaming": true }
420
- },
421
- {
422
- "id": "Qwen/Qwen2.5-32B-Instruct",
423
- "name": "Qwen2.5-32B-Instruct",
424
- "created_by": "qwen",
425
- "source": "official",
426
- "last_updated": "2026-03-21",
427
- "capabilities": { "streaming": true }
428
- },
429
- {
430
- "id": "Qwen/Qwen2.5-72B-Instruct",
431
- "name": "Qwen2.5-72B-Instruct",
432
- "created_by": "qwen",
433
- "source": "official",
434
- "last_updated": "2026-03-21",
435
- "capabilities": { "streaming": true }
436
- },
437
- {
438
- "id": "Qwen/Qwen2.5-7B-Instruct-1M",
439
- "name": "Qwen2.5-7B-Instruct-1M",
440
- "created_by": "qwen",
441
- "source": "official",
442
- "last_updated": "2026-03-21",
443
- "capabilities": { "streaming": true }
444
- },
445
- {
446
- "id": "Qwen/Qwen2.5-7B-Instruct",
447
- "name": "Qwen2.5-7B-Instruct",
448
- "created_by": "qwen",
449
- "source": "official",
450
- "last_updated": "2026-03-21",
451
- "capabilities": { "streaming": true }
452
- },
453
- {
454
- "id": "Qwen/Qwen2.5-Coder-1.5B-Instruct",
455
- "name": "Qwen2.5-Coder-1.5B-Instruct",
456
- "created_by": "qwen",
457
- "source": "official",
458
- "last_updated": "2026-03-21",
459
- "capabilities": { "streaming": true }
460
- },
461
- {
462
- "id": "Qwen/Qwen2.5-Coder-1.5B",
463
- "name": "Qwen2.5-Coder-1.5B",
464
- "created_by": "qwen",
465
- "source": "official",
466
- "last_updated": "2026-03-21",
467
- "capabilities": { "streaming": true }
468
- },
469
- {
470
- "id": "Qwen/Qwen2.5-Coder-14B-Instruct",
471
- "name": "Qwen2.5-Coder-14B-Instruct",
472
- "created_by": "qwen",
473
- "source": "official",
474
- "last_updated": "2026-03-21",
475
- "capabilities": { "streaming": true }
476
- },
477
- {
478
- "id": "Qwen/Qwen2.5-Coder-14B",
479
- "name": "Qwen2.5-Coder-14B",
480
- "created_by": "qwen",
481
- "source": "official",
482
- "last_updated": "2026-03-21",
483
- "capabilities": { "streaming": true }
484
- },
485
- {
486
- "id": "Qwen/Qwen2.5-Coder-32B-Instruct",
487
- "name": "Qwen2.5-Coder-32B-Instruct",
488
- "created_by": "qwen",
489
- "source": "official",
490
- "last_updated": "2026-03-21",
491
- "capabilities": { "streaming": true }
492
- },
493
- {
494
- "id": "Qwen/Qwen2.5-Coder-32B",
495
- "name": "Qwen2.5-Coder-32B",
496
- "created_by": "qwen",
497
- "source": "official",
498
- "last_updated": "2026-03-21",
499
- "capabilities": { "streaming": true }
500
- },
501
- {
502
- "id": "Qwen/Qwen2.5-Coder-3B-Instruct",
503
- "name": "Qwen2.5-Coder-3B-Instruct",
504
- "created_by": "qwen",
505
- "source": "official",
506
- "last_updated": "2026-03-21",
507
- "capabilities": { "streaming": true }
508
- },
509
- {
510
- "id": "Qwen/Qwen2.5-Coder-7B-Instruct",
511
- "name": "Qwen2.5-Coder-7B-Instruct",
512
- "created_by": "qwen",
513
- "source": "official",
514
- "last_updated": "2026-03-21",
515
- "capabilities": { "streaming": true }
516
- },
517
- {
518
- "id": "Qwen/Qwen2.5-Coder-7B",
519
- "name": "Qwen2.5-Coder-7B",
520
- "created_by": "qwen",
521
- "source": "official",
522
- "last_updated": "2026-03-21",
523
- "capabilities": { "streaming": true }
524
- },
525
- {
526
- "id": "Qwen/Qwen2.5-Math-1.5B-Instruct",
527
- "name": "Qwen2.5-Math-1.5B-Instruct",
528
- "created_by": "qwen",
529
- "source": "official",
530
- "last_updated": "2026-03-21",
531
- "capabilities": { "streaming": true }
532
- },
533
- {
534
- "id": "Qwen/Qwen2.5-Math-7B-Instruct",
535
- "name": "Qwen2.5-Math-7B-Instruct",
536
- "created_by": "qwen",
537
- "source": "official",
538
- "last_updated": "2026-03-21",
539
- "capabilities": { "streaming": true }
540
- },
541
- {
542
- "id": "Qwen/Qwen3-1.7B",
543
- "name": "Qwen3-1.7B",
544
- "created_by": "qwen",
545
- "source": "official",
546
- "last_updated": "2026-03-21",
547
- "capabilities": { "streaming": true }
548
- },
549
- {
550
- "id": "Qwen/Qwen3-14B-Base",
551
- "name": "Qwen3-14B-Base",
552
- "created_by": "qwen",
553
- "source": "official",
554
- "last_updated": "2026-03-21",
555
- "capabilities": { "streaming": true }
556
- },
557
- {
558
- "id": "Qwen/Qwen3-14B",
559
- "name": "Qwen3-14B",
560
- "created_by": "qwen",
561
- "source": "official",
562
- "last_updated": "2026-03-21",
563
- "capabilities": { "streaming": true }
564
- },
565
- {
566
- "id": "Qwen/Qwen3-235B-A22B-Instruct-2507",
567
- "name": "Qwen3-235B-A22B-Instruct-2507",
568
- "created_by": "qwen",
569
- "source": "official",
570
- "last_updated": "2026-03-21",
571
- "alias": "Qwen/Qwen3-235B-A22B-Instruct",
572
- "capabilities": { "streaming": true }
573
- },
574
- {
575
- "id": "Qwen/Qwen3-235B-A22B-Instruct",
576
- "name": "Qwen3-235B-A22B-Instruct-2507",
577
- "created_by": "qwen",
578
- "source": "official",
579
- "last_updated": "2026-03-21",
580
- "capabilities": { "streaming": true },
581
- "snapshots": ["Qwen/Qwen3-235B-A22B-Instruct-2507"]
582
- },
583
- {
584
- "id": "Qwen/Qwen3-235B-A22B-Thinking-2507",
585
- "name": "Qwen3-235B-A22B-Thinking-2507",
586
- "created_by": "qwen",
587
- "source": "official",
588
- "last_updated": "2026-03-21",
589
- "alias": "Qwen/Qwen3-235B-A22B-Thinking",
590
- "capabilities": { "streaming": true }
591
- },
592
- {
593
- "id": "Qwen/Qwen3-235B-A22B-Thinking",
594
- "name": "Qwen3-235B-A22B-Thinking-2507",
595
- "created_by": "qwen",
596
- "source": "official",
597
- "last_updated": "2026-03-21",
598
- "capabilities": { "streaming": true },
599
- "snapshots": ["Qwen/Qwen3-235B-A22B-Thinking-2507"]
600
- },
601
- {
602
- "id": "Qwen/Qwen3-235B-A22B",
603
- "name": "Qwen3-235B-A22B",
604
- "created_by": "qwen",
605
- "source": "official",
606
- "last_updated": "2026-03-21",
607
- "capabilities": { "streaming": true }
608
- },
609
- {
610
- "id": "Qwen/Qwen3-30B-A3B-Instruct-2507",
611
- "name": "Qwen3-30B-A3B-Instruct-2507",
612
- "created_by": "qwen",
613
- "source": "official",
614
- "last_updated": "2026-03-21",
615
- "alias": "Qwen/Qwen3-30B-A3B-Instruct",
616
- "capabilities": { "streaming": true }
617
- },
618
- {
619
- "id": "Qwen/Qwen3-30B-A3B-Instruct",
620
- "name": "Qwen3-30B-A3B-Instruct-2507",
621
- "created_by": "qwen",
622
- "source": "official",
623
- "last_updated": "2026-03-21",
624
- "capabilities": { "streaming": true },
625
- "snapshots": ["Qwen/Qwen3-30B-A3B-Instruct-2507"]
626
- },
627
- {
628
- "id": "Qwen/Qwen3-30B-A3B",
629
- "name": "Qwen3-30B-A3B",
630
- "created_by": "qwen",
631
- "source": "official",
632
- "last_updated": "2026-03-21",
633
- "capabilities": { "streaming": true }
634
- },
635
- {
636
- "id": "Qwen/Qwen3-32B",
637
- "name": "Qwen3-32B",
638
- "created_by": "qwen",
639
- "source": "official",
640
- "last_updated": "2026-03-21",
641
- "capabilities": { "streaming": true }
642
- },
643
- {
644
- "id": "Qwen/Qwen3-4B-Instruct-2507",
645
- "name": "Qwen3-4B-Instruct-2507",
646
- "created_by": "qwen",
647
- "source": "official",
648
- "last_updated": "2026-03-21",
649
- "alias": "Qwen/Qwen3-4B-Instruct",
650
- "capabilities": { "streaming": true }
651
- },
652
- {
653
- "id": "Qwen/Qwen3-4B-Instruct",
654
- "name": "Qwen3-4B-Instruct-2507",
655
- "created_by": "qwen",
656
- "source": "official",
657
- "last_updated": "2026-03-21",
658
- "capabilities": { "streaming": true },
659
- "snapshots": ["Qwen/Qwen3-4B-Instruct-2507"]
660
- },
661
- {
662
- "id": "Qwen/Qwen3-4B-Thinking-2507",
663
- "name": "Qwen3-4B-Thinking-2507",
664
- "created_by": "qwen",
665
- "source": "official",
666
- "last_updated": "2026-03-21",
667
- "alias": "Qwen/Qwen3-4B-Thinking",
668
- "capabilities": { "streaming": true }
669
- },
670
- {
671
- "id": "Qwen/Qwen3-4B-Thinking",
672
- "name": "Qwen3-4B-Thinking-2507",
673
- "created_by": "qwen",
674
- "source": "official",
675
- "last_updated": "2026-03-21",
676
- "capabilities": { "streaming": true },
677
- "snapshots": ["Qwen/Qwen3-4B-Thinking-2507"]
678
- },
679
- {
680
- "id": "Qwen/Qwen3-8B-Base",
681
- "name": "Qwen3-8B-Base",
682
- "created_by": "qwen",
683
- "source": "official",
684
- "last_updated": "2026-03-21",
685
- "capabilities": { "streaming": true }
686
- },
687
- {
688
- "id": "Qwen/Qwen3-8B",
689
- "name": "Qwen3-8B",
690
- "created_by": "qwen",
691
- "source": "official",
692
- "last_updated": "2026-03-21",
693
- "capabilities": { "streaming": true }
694
- },
695
- {
696
- "id": "Qwen/Qwen3-Coder-30B-A3B-Instruct",
697
- "name": "Qwen3-Coder-30B-A3B-Instruct",
698
- "created_by": "qwen",
699
- "source": "official",
700
- "last_updated": "2026-03-21",
701
- "capabilities": { "streaming": true }
702
- },
703
- {
704
- "id": "Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8",
705
- "name": "Qwen3-Coder-480B-A35B-Instruct-FP8",
706
- "created_by": "qwen",
707
- "source": "official",
708
- "last_updated": "2026-03-21",
709
- "capabilities": { "streaming": true }
710
- },
711
- {
712
- "id": "Qwen/Qwen3-Coder-480B-A35B-Instruct",
713
- "name": "Qwen3-Coder-480B-A35B-Instruct",
714
- "created_by": "qwen",
715
- "source": "official",
716
- "last_updated": "2026-03-21",
717
- "capabilities": { "streaming": true }
718
- },
719
- {
720
- "id": "Qwen/Qwen3-Coder-Next-FP8",
721
- "name": "Qwen3-Coder-Next-FP8",
722
- "created_by": "qwen",
723
- "source": "official",
724
- "last_updated": "2026-03-21",
725
- "capabilities": { "streaming": true }
726
- },
727
- {
728
- "id": "Qwen/Qwen3-Coder-Next",
729
- "name": "Qwen3-Coder-Next",
730
- "created_by": "qwen",
731
- "source": "official",
732
- "last_updated": "2026-03-21",
733
- "capabilities": { "streaming": true }
734
- },
735
- {
736
- "id": "Qwen/Qwen3-Next-80B-A3B-Instruct",
737
- "name": "Qwen3-Next-80B-A3B-Instruct",
738
- "created_by": "qwen",
739
- "source": "official",
740
- "last_updated": "2026-03-21",
741
- "capabilities": { "streaming": true }
742
- },
743
- {
744
- "id": "Qwen/Qwen3-Next-80B-A3B-Thinking",
745
- "name": "Qwen3-Next-80B-A3B-Thinking",
746
- "created_by": "qwen",
747
- "source": "official",
748
- "last_updated": "2026-03-21",
749
- "capabilities": { "streaming": true }
750
- },
751
- {
752
- "id": "RedHatAI/Meta-Llama-3-8B-Instruct-FP8-KV",
753
- "name": "Meta-Llama-3-8B-Instruct-FP8-KV",
754
- "created_by": "redhatai",
755
- "source": "official",
756
- "last_updated": "2026-03-21",
757
- "capabilities": { "streaming": true }
758
- },
759
- {
760
- "id": "SakanaAI/TinySwallow-1.5B-Instruct",
761
- "name": "TinySwallow-1.5B-Instruct",
762
- "created_by": "sakanaai",
763
- "source": "official",
764
- "last_updated": "2026-03-21",
765
- "capabilities": { "streaming": true }
766
- },
767
- {
768
- "id": "speakleash/Bielik-7B-Instruct-v0.1",
769
- "name": "Bielik-7B-Instruct-v0.1",
770
- "created_by": "speakleash",
771
- "source": "official",
772
- "last_updated": "2026-03-21",
773
- "capabilities": { "streaming": true }
774
- },
775
- {
776
- "id": "swap-uniba/LLaMAntino-3-ANITA-8B-Inst-DPO-ITA",
777
- "name": "LLaMAntino-3-ANITA-8B-Inst-DPO-ITA",
778
- "created_by": "swap-uniba",
779
- "source": "official",
780
- "last_updated": "2026-03-21",
781
- "capabilities": { "streaming": true }
782
- },
783
- {
784
- "id": "swiss-ai/Apertus-8B-Instruct-2509",
785
- "name": "Apertus-8B-Instruct-2509",
786
- "created_by": "swiss-ai",
787
- "source": "official",
788
- "last_updated": "2026-03-21",
789
- "alias": "swiss-ai/Apertus-8B-Instruct",
790
- "capabilities": { "streaming": true }
791
- },
792
- {
793
- "id": "swiss-ai/Apertus-8B-Instruct",
794
- "name": "Apertus-8B-Instruct-2509",
795
- "created_by": "swiss-ai",
796
- "source": "official",
797
- "last_updated": "2026-03-21",
798
- "capabilities": { "streaming": true },
799
- "snapshots": ["swiss-ai/Apertus-8B-Instruct-2509"]
800
- },
801
- {
802
- "id": "tokyotech-llm/Llama-3-Swallow-8B-Instruct-v0.1",
803
- "name": "Llama-3-Swallow-8B-Instruct-v0.1",
804
- "created_by": "tokyotech-llm",
805
- "source": "official",
806
- "last_updated": "2026-03-21",
807
- "capabilities": { "streaming": true }
808
- },
809
- {
810
- "id": "tokyotech-llm/Llama-3.1-Swallow-8B-Instruct-v0.5",
811
- "name": "Llama-3.1-Swallow-8B-Instruct-v0.5",
812
- "created_by": "tokyotech-llm",
813
- "source": "official",
814
- "last_updated": "2026-03-21",
815
- "capabilities": { "streaming": true }
816
- },
817
- {
818
- "id": "unsloth/gemma-2-9b-it",
819
- "name": "gemma-2-9b-it",
820
- "created_by": "unsloth",
821
- "source": "official",
822
- "last_updated": "2026-03-21",
823
- "family": "gemma-2",
824
- "capabilities": { "streaming": true }
825
- },
826
- {
827
- "id": "unsloth/llama-3-8b-Instruct",
828
- "name": "llama-3-8b-Instruct",
829
- "created_by": "unsloth",
830
- "source": "official",
831
- "last_updated": "2026-03-21",
832
- "family": "llama-3",
833
- "capabilities": { "streaming": true }
834
- },
835
- {
836
- "id": "unsloth/Llama-3.1-8B-Instruct",
837
- "name": "Llama-3.1-8B-Instruct",
838
- "created_by": "unsloth",
839
- "source": "official",
840
- "last_updated": "2026-03-21",
841
- "capabilities": { "streaming": true }
842
- },
843
- {
844
- "id": "unsloth/Meta-Llama-3.1-8B-Instruct",
845
- "name": "Meta-Llama-3.1-8B-Instruct",
846
- "created_by": "unsloth",
847
- "source": "official",
848
- "last_updated": "2026-03-21",
849
- "capabilities": { "streaming": true }
850
- },
851
- {
852
- "id": "unsloth/Mistral-Small-24B-Instruct-2501",
853
- "name": "Mistral-Small-24B-Instruct-2501",
854
- "created_by": "unsloth",
855
- "source": "official",
856
- "last_updated": "2026-03-21",
857
- "alias": "unsloth/Mistral-Small-24B-Instruct",
858
- "capabilities": { "streaming": true }
859
- },
860
- {
861
- "id": "unsloth/Mistral-Small-24B-Instruct",
862
- "name": "Mistral-Small-24B-Instruct-2501",
863
- "created_by": "unsloth",
864
- "source": "official",
865
- "last_updated": "2026-03-21",
866
- "capabilities": { "streaming": true },
867
- "snapshots": ["unsloth/Mistral-Small-24B-Instruct-2501"]
868
- },
869
- {
870
- "id": "unsloth/Qwen2.5-14B-Instruct",
871
- "name": "Qwen2.5-14B-Instruct",
872
- "created_by": "unsloth",
873
- "source": "official",
874
- "last_updated": "2026-03-21",
875
- "capabilities": { "streaming": true }
876
- },
877
- {
878
- "id": "unsloth/Qwen2.5-32B-Instruct",
879
- "name": "Qwen2.5-32B-Instruct",
880
- "created_by": "unsloth",
881
- "source": "official",
882
- "last_updated": "2026-03-21",
883
- "capabilities": { "streaming": true }
884
- },
885
- {
886
- "id": "unsloth/Qwen2.5-7B-Instruct",
887
- "name": "Qwen2.5-7B-Instruct",
888
- "created_by": "unsloth",
889
- "source": "official",
890
- "last_updated": "2026-03-21",
891
- "capabilities": { "streaming": true }
892
- },
893
- {
894
- "id": "unsloth/Qwen2.5-Coder-32B-Instruct",
895
- "name": "Qwen2.5-Coder-32B-Instruct",
896
- "created_by": "unsloth",
897
- "source": "official",
898
- "last_updated": "2026-03-21",
899
- "capabilities": { "streaming": true }
900
- },
901
- {
902
- "id": "unsloth/Qwen3-8B",
903
- "name": "Qwen3-8B",
904
- "created_by": "unsloth",
905
- "source": "official",
906
- "last_updated": "2026-03-21",
907
- "capabilities": { "streaming": true }
908
- },
909
- {
910
- "id": "unsloth/tinyllama-chat",
911
- "name": "tinyllama-chat",
912
- "created_by": "unsloth",
913
- "source": "official",
914
- "last_updated": "2026-03-21",
915
- "capabilities": { "streaming": true }
916
- },
917
- {
918
- "id": "VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct",
919
- "name": "Llama-3-SauerkrautLM-8b-Instruct",
920
- "created_by": "vagosolutions",
921
- "source": "official",
922
- "last_updated": "2026-03-21",
923
- "capabilities": { "streaming": true }
924
- },
925
- {
926
- "id": "zai-org/GLM-4-9B-0414",
927
- "name": "GLM-4-9B-0414",
928
- "created_by": "zhipu",
929
- "source": "official",
930
- "last_updated": "2026-03-21",
931
- "capabilities": { "streaming": true }
932
- },
933
- {
934
- "id": "zai-org/GLM-4.5-Air-FP8",
935
- "name": "GLM-4.5-Air-FP8",
936
- "created_by": "zhipu",
937
- "source": "official",
938
- "last_updated": "2026-03-21",
939
- "capabilities": { "streaming": true }
940
- },
941
- {
942
- "id": "zai-org/GLM-4.5-Air",
943
- "name": "GLM-4.5-Air",
944
- "created_by": "zhipu",
945
- "source": "official",
946
- "last_updated": "2026-03-21",
947
- "capabilities": { "streaming": true }
948
- },
949
- {
950
- "id": "zai-org/GLM-4.5",
951
- "name": "GLM-4.5",
952
- "created_by": "zhipu",
953
- "source": "official",
954
- "last_updated": "2026-03-21",
955
- "capabilities": { "streaming": true }
956
- },
957
- {
958
- "id": "zai-org/GLM-4.6-FP8",
959
- "name": "GLM-4.6-FP8",
960
- "created_by": "zhipu",
961
- "source": "official",
962
- "last_updated": "2026-03-21",
963
- "capabilities": { "streaming": true }
964
- },
965
- {
966
- "id": "zai-org/GLM-4.6",
967
- "name": "GLM-4.6",
968
- "created_by": "zhipu",
969
- "source": "official",
970
- "last_updated": "2026-03-21",
971
- "capabilities": { "streaming": true }
972
- },
973
- {
974
- "id": "zai-org/GLM-4.7-Flash",
975
- "name": "GLM-4.7-Flash",
976
- "created_by": "zhipu",
977
- "source": "official",
978
- "last_updated": "2026-03-21",
979
- "capabilities": { "streaming": true }
980
- },
981
- {
982
- "id": "zai-org/GLM-4.7-FP8",
983
- "name": "GLM-4.7-FP8",
984
- "created_by": "zhipu",
985
- "source": "official",
986
- "last_updated": "2026-03-21",
987
- "capabilities": { "streaming": true }
988
- },
989
- {
990
- "id": "zai-org/GLM-4.7",
991
- "name": "GLM-4.7",
992
- "created_by": "zhipu",
993
- "source": "official",
994
- "last_updated": "2026-03-21",
995
- "capabilities": { "streaming": true }
996
- },
997
- {
998
- "id": "zai-org/GLM-5",
999
- "name": "GLM-5",
1000
- "created_by": "zhipu",
1001
- "source": "official",
1002
- "last_updated": "2026-03-21",
1003
- "capabilities": { "streaming": true }
1004
- },
1005
- {
1006
- "id": "zai-org/GLM-Z1-32B-0414",
1007
- "name": "GLM-Z1-32B-0414",
1008
- "created_by": "zhipu",
1009
- "source": "official",
1010
- "last_updated": "2026-03-21",
1011
- "capabilities": { "streaming": true }
1012
- }
1013
- ]
1014
- };
1015
- const models = provider.models.map((m) => ({
1016
- ...m,
1017
- provider: provider.id
1018
- }));
1019
- //#endregion
1020
- export { models, provider };
1
+ const e={id:`huggingface`,name:`Hugging Face`,region:`US`,headquarters:`New York, NY`,founded:2016,url:`https://huggingface.co`,api_url:`https://router.huggingface.co/v1`,docs_url:`https://huggingface.co/docs/api-inference`,pricing_url:`https://huggingface.co/pricing`,description:`The AI community platform hosting open-source models with serverless inference endpoints.`,type:`aggregator`,sdk:{python:`huggingface_hub`,javascript:`@huggingface/inference`},openai_compatible:!0,free_tier:!0,github_url:`https://github.com/huggingface`,models_url:`https://huggingface.co/models`,twitter_url:`https://x.com/huggingface`,discord_url:`https://discord.com/invite/hugging-face-879548962464493619`,blog_url:`https://huggingface.co/blog`,terms_url:`https://huggingface.co/terms-of-service`,support_url:`https://huggingface.co/support`,icon:`<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="currentColor" fill-rule="evenodd">
2
+ <path d="M16.781 3.277c2.997 1.704 4.844 4.851 4.844 8.258 0 .995-.155 1.955-.443 2.857a1.332 1.332 0 011.125.4 1.41 1.41 0 01.2 1.723c.204.165.352.385.428.632l.017.062c.06.222.12.69-.2 1.166.244.37.279.836.093 1.236-.255.57-.893 1.018-2.128 1.5l-.202.078-.131.048c-.478.173-.89.295-1.061.345l-.086.024c-.89.243-1.808.375-2.732.394-1.32 0-2.3-.36-2.923-1.067a9.852 9.852 0 01-3.18.018C9.778 21.647 8.802 22 7.494 22a11.249 11.249 0 01-2.541-.343l-.221-.06-.273-.08a16.574 16.574 0 01-1.175-.405c-1.237-.483-1.875-.93-2.13-1.501-.186-.4-.151-.867.093-1.236a1.42 1.42 0 01-.2-1.166c.069-.273.226-.516.447-.694a1.41 1.41 0 01.2-1.722c.233-.248.557-.391.917-.407l.078-.001a9.385 9.385 0 01-.44-2.85c0-3.407 1.847-6.554 4.844-8.258a9.822 9.822 0 019.687 0zM4.188 14.758c.125.687 2.357 2.35 2.14 2.707-.19.315-.796-.239-.948-.386l-.041-.04-.168-.147c-.561-.479-2.304-1.9-2.74-1.432-.43.46.119.859 1.055 1.42l.784.467.136.083c1.045.643 1.12.84.95 1.113-.188.295-3.07-2.1-3.34-1.083-.27 1.011 2.942 1.304 2.744 2.006-.2.7-2.265-1.324-2.685-.537-.425.79 2.913 1.718 2.94 1.725l.16.04.175.042c1.227.284 3.565.65 4.435-.604.673-.973.64-1.709-.248-2.61l-.057-.057c-.945-.928-1.495-2.288-1.495-2.288l-.017-.058-.025-.072c-.082-.22-.284-.639-.63-.584-.46.073-.798 1.21.12 1.933l.05.038c.977.721-.195 1.21-.573.534l-.058-.104-.143-.25c-.463-.799-1.282-2.111-1.739-2.397-.532-.332-.907-.148-.782.541zm14.842-.541c-.533.335-1.563 2.074-1.94 2.751a.613.613 0 01-.687.302.436.436 0 01-.176-.098.303.303 0 01-.049-.06l-.014-.028-.008-.02-.007-.019-.003-.013-.003-.017a.289.289 0 01-.004-.048c0-.12.071-.266.25-.427.026-.024.054-.047.084-.07l.047-.036c.022-.016.043-.032.063-.049.883-.71.573-1.81.131-1.917l-.031-.006-.056-.004a.368.368 0 00-.062.006l-.028.005-.042.014-.039.017-.028.015-.028.019-.036.027-.023.02c-.173.158-.273.428-.31.542l-.016.054s-.53 1.309-1.439 2.234l-.054.054c-.365.358-.596.69-.702 1.018-.143.437-.066.868.21 1.353.055.097.117.195.187.296.882 1.275 3.282.876 
4.494.59l.286-.07.25-.074c.276-.084.736-.233 1.2-.42l.188-.077.065-.028.064-.028.124-.056.081-.038c.529-.252.964-.543.994-.827l.001-.036a.299.299 0 00-.037-.139c-.094-.176-.271-.212-.491-.168l-.045.01c-.044.01-.09.024-.136.04l-.097.035-.054.022c-.559.23-1.238.705-1.607.745h.006a.452.452 0 01-.05.003h-.024l-.024-.003-.023-.005c-.068-.016-.116-.06-.14-.142a.22.22 0 01-.005-.1c.062-.345.958-.595 1.713-.91l.066-.028c.528-.224.97-.483.985-.832v-.04a.47.47 0 00-.016-.098c-.048-.18-.175-.251-.36-.251-.785 0-2.55 1.36-2.92 1.36-.025 0-.048-.007-.058-.024a.6.6 0 01-.046-.088c-.1-.238.068-.462 1.06-1.066l.209-.126c.538-.32 1.01-.588 1.341-.831.29-.212.475-.406.503-.6l.003-.028c.008-.113-.038-.227-.147-.344a.266.266 0 00-.07-.054l-.034-.015-.013-.005a.403.403 0 00-.13-.02c-.162 0-.369.07-.595.18-.637.313-1.431.952-1.826 1.285l-.249.215-.033.033c-.08.078-.288.27-.493.386l-.071.037-.041.019a.535.535 0 01-.122.036h.005a.346.346 0 01-.031.003l.01-.001-.013.001c-.079.005-.145-.021-.19-.095a.113.113 0 01-.014-.065c.027-.465 2.034-1.991 2.152-2.642l.009-.048c.1-.65-.271-.817-.791-.493zM11.938 2.984c-4.798 0-8.688 3.829-8.688 8.55 0 .692.083 1.364.24 2.008l.008-.009c.252-.298.612-.46 1.017-.46.355.008.699.117.993.312.22.14.465.384.715.694.261-.372.69-.598 1.15-.605.852 0 1.367.728 1.562 1.383l.047.105.06.127c.192.396.595 1.139 1.143 1.68 1.06 1.04 1.324 2.115.8 3.266a8.865 8.865 0 002.024-.014c-.505-1.12-.26-2.17.74-3.186l.066-.066c.695-.684 1.157-1.69 1.252-1.912.195-.655.708-1.383 1.56-1.383.46.007.889.233 1.15.605.25-.31.495-.553.718-.694a1.87 1.87 0 01.99-.312c.357 0 .682.126.925.36.14-.61.215-1.245.215-1.898 0-4.722-3.89-8.55-8.687-8.55zm1.857 8.926l.439-.212c.553-.264.89-.383.89.152 0 1.093-.771 3.208-3.155 3.262h-.184c-2.325-.052-3.116-2.06-3.156-3.175l-.001-.087c0-1.107 1.452.586 3.25.586.716 0 1.379-.272 1.917-.526zm4.017-3.143c.45 0 .813.358.813.8 0 .441-.364.8-.813.8a.806.806 0 01-.812-.8c0-.442.364-.8.812-.8zm-11.624 0c.448 0 .812.358.812.8 0 .441-.364.8-.812.8a.806.806 0 
01-.813-.8c0-.442.364-.8.813-.8zm7.79-.841c.32-.384.846-.54 1.33-.394.483.146.83.564.878 1.06.048.495-.212.97-.659 1.203-.322.168-.447-.477-.767-.585l.002-.003c-.287-.098-.772.362-.925.079a1.215 1.215 0 01.14-1.36zm-4.323 0c.322.384.377.92.14 1.36-.152.283-.64-.177-.925-.079l.003.003c-.108.036-.194.134-.273.24l-.118.165c-.11.15-.22.262-.377.18a1.226 1.226 0 01-.658-1.204c.048-.495.395-.913.878-1.059a1.262 1.262 0 011.33.394z"/>
3
+ </svg>`,models:[{id:`aaditya/Llama3-OpenBioLLM-8B`,name:`Llama3-OpenBioLLM-8B`,created_by:`aaditya`,source:`official`,last_updated:`2026-03-23`,page_url:`https://huggingface.co/aaditya/Llama3-OpenBioLLM-8B`,model_type:`chat`,capabilities:{streaming:!0},description:`Introducing OpenBioLLM-8B: A State-of-the-Art Open Source Biomedical Large Language Model`,release_date:`2024-04-20`,context_window:8192,license:`llama3`,tagline:`Introducing OpenBioLLM-8B: A State-of-the-Art Open Source Biomedical Large Language Model`,architecture:`transformer`,open_weight:!0},{id:`allenai/Olmo-3-7B-Instruct`,name:`Olmo-3-7B-Instruct`,created_by:`allenai`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},parameters:0,page_url:`https://huggingface.co/allenai/Olmo-3-7B-Instruct`,release_date:`2025-11-19`,context_window:65536,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`allenai/Olmo-3.1-32B-Instruct`,name:`Olmo-3.1-32B-Instruct`,created_by:`allenai`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},parameters:32,page_url:`https://huggingface.co/allenai/Olmo-3.1-32B-Instruct`,release_date:`2025-12-10`,context_window:65536,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`argilla/Llama-3.2-1B-Instruct-APIGen-FC-v0.1`,name:`Llama-3.2-1B-Instruct-APIGen-FC-v0.1`,created_by:`argilla`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3.2`,model_type:`chat`,parameters:1,page_url:`https://huggingface.co/argilla/Llama-3.2-1B-Instruct-APIGen-FC-v0.1`,description:`This model is a fine-tuned version of [meta-llama/Llama-3.2-1B-Instruct](https://huggingface.co/meta-llama/Llama-3.2-1B-Instruct) on [argilla-warehouse/apigen-synth-trl](https://huggingface.co/datasets/argilla-warehouse/apigen-synth-trl) dataset, a version of 
[argilla/Synth-APIGen-v0.1](https://huggingface.co/datasets/argilla-warehouse/Synth-APIGen-v0.1) ready to do SFT on top of it. It has been trained using [TRL](https://github.com/huggingface/trl).`,release_date:`2024-10-07`,context_window:131072,license:`apache-2.0`,tagline:`This model is a fine-tuned version of [meta-llama/Llama-3.2-1B-Instruct](https://huggingface.co/meta-llama/Llama-3.2-1B-Instruct) on [argilla-warehouse/apigen-synth-trl](https://huggingface.co/datasets/argilla-warehouse/apigen-synth-trl) dataset, a version of [argilla/Synth-APIGen-v0.1](https://huggingface.co/datasets/argilla-warehouse/Synth-APIGen-v0.1) ready to do SFT on top of it.`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`argilla-warehouse/Llama-3.2-1B-Instruct-v2-FC`,name:`Llama-3.2-1B-Instruct-v2-FC`,created_by:`argilla-warehouse`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3.2`,model_type:`chat`,parameters:1,page_url:`https://huggingface.co/argilla-warehouse/Llama-3.2-1B-Instruct-v2-FC`,description:`This model is a fine-tuned version of [meta-llama/Llama-3.2-1B-Instruct](https://huggingface.co/meta-llama/Llama-3.2-1B-Instruct) on the [argilla-warehouse/apigen-smollm-trl-FC](https://huggingface.co/datasets/argilla-warehouse/apigen-smollm-trl-FC) dataset. 
It has been trained using [TRL](https://github.com/huggingface/trl).`,release_date:`2024-10-20`,context_window:131072,tagline:`This model is a fine-tuned version of [meta-llama/Llama-3.2-1B-Instruct](https://huggingface.co/meta-llama/Llama-3.2-1B-Instruct) on the [argilla-warehouse/apigen-smollm-trl-FC](https://huggingface.co/datasets/argilla-warehouse/apigen-smollm-trl-FC) dataset.`,architecture:`transformer`,tools:[`function_calling`]},{id:`DeepMount00/Llama-3-8b-Ita`,name:`Llama-3-8b-Ita`,created_by:`deepmount00`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/DeepMount00/Llama-3-8b-Ita`,description:`**💡 Found this resource helpful?** Creating and maintaining open source AI models and datasets requires significant computational resources. If this work has been valuable to you, consider [supporting my research](https://buymeacoffee.com/michele.montebovi) to help me continue building tools that benefit the entire AI community. Every contribution directly funds more open source innovation! 
☕`,release_date:`2024-05-01`,context_window:8192,license:`llama3`,tagline:`**💡 Found this resource helpful?** Creating and maintaining open source AI models and datasets requires significant computational resources.`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`deepseek-ai/DeepSeek-R1-0528-Qwen3-8B`,name:`DeepSeek-R1-0528-Qwen3-8B`,created_by:`deepseek`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`reasoning`,parameters:8.2,page_url:`https://huggingface.co/deepseek-ai/DeepSeek-R1-0528-Qwen3-8B`,release_date:`2025-05-29`,context_window:131072,license:`mit`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Efficient-Large-Model/gemma-2-2b-it`,name:`gemma-2-2b-it`,created_by:`efficient-large-model`,source:`official`,last_updated:`2026-03-23`,family:`gemma-2`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:2.6,page_url:`https://huggingface.co/Efficient-Large-Model/gemma-2-2b-it`,description:`**Model Page**: [Gemma](https://ai.google.dev/gemma/docs/base)`,release_date:`2024-12-12`,context_window:8192,license:`gemma`,tagline:`**Model Page**: [Gemma](https://ai.google.dev/gemma/docs/base)`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`EssentialAI/rnj-1-instruct`,name:`rnj-1-instruct`,created_by:`essentialai`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},page_url:`https://huggingface.co/EssentialAI/rnj-1-instruct`,description:`Rnj-1 is a family of 8B parameter open-weight, dense models trained from scratch by Essential AI, optimized for code and STEM with capabilities on par with SOTA open-weight models. These models perform well across a range of programming languages and boast strong agentic capabilities (e.g., inside agentic frameworks like mini-SWE-agent), while also excelling at tool-calling. 
They additionally exhibit strong capabilities in math and science.`,release_date:`2025-12-04`,context_window:32768,license:`apache-2.0`,parameters:8.3,tagline:`style="vertical-align: middle;" src="https://img.shields.io/badge/%F0%9F%8C%90%20Website-essential.ai-4b9fe1?color=4b9fe1&logoColor=white"/>`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`failspy/Meta-Llama-3-8B-Instruct-abliterated-v3`,name:`Meta-Llama-3-8B-Instruct-abliterated-v3`,created_by:`failspy`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/failspy/Meta-Llama-3-8B-Instruct-abliterated-v3`,description:`[My Jupyter "cookbook" to replicate the methodology can be found here, refined library coming soon](https://huggingface.co/failspy/llama-3-70B-Instruct-abliterated/blob/main/ortho_cookbook.ipynb)`,release_date:`2024-05-20`,context_window:8192,license:`llama3`,tagline:`[My Jupyter "cookbook" to replicate the methodology can be found here, refined library coming 
soon](https://huggingface.co/failspy/llama-3-70B-Instruct-abliterated/blob/main/ortho_cookbook.ipynb)`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Featherless-Chat-Models/Mistral-7B-Instruct-v0.2`,name:`Mistral-7B-Instruct-v0.2`,created_by:`featherless-chat-models`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:7.2,page_url:`https://huggingface.co/Featherless-Chat-Models/Mistral-7B-Instruct-v0.2`,release_date:`2025-05-08`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`FlagAlpha/Llama3-Chinese-8B-Instruct`,name:`Llama3-Chinese-8B-Instruct`,created_by:`flagalpha`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:8,page_url:`https://huggingface.co/FlagAlpha/Llama3-Chinese-8B-Instruct`,description:`Llama3-Chinese-8B-Instruct基于Llama3-8B中文微调对话模型,由Llama中文社区和AtomEcho(原子回声)联合研发,我们会持续提供更新的模型参数,模型训练过程见 [https://llama.family](https://llama.family)。`,release_date:`2024-04-23`,context_window:8192,license:`apache-2.0`,tagline:`Llama3-Chinese-8B-Instruct基于Llama3-8B中文微调对话模型,由Llama中文社区和AtomEcho(原子回声)联合研发,我们会持续提供更新的模型参数,模型训练过程见 
[https://llama.family](https://llama.family)。`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`google/gemma-2-2b-jpn-it`,name:`gemma-2-2b-jpn-it`,created_by:`google`,source:`official`,last_updated:`2026-03-23`,family:`gemma-2`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:2.6,page_url:`https://huggingface.co/google/gemma-2-2b-jpn-it`,release_date:`2024-09-25`,license:`gemma`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`google/gemma-2-9b-it`,name:`gemma-2-9b-it`,created_by:`google`,source:`official`,last_updated:`2026-03-23`,family:`gemma-2`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:9.2,page_url:`https://huggingface.co/google/gemma-2-9b-it`,release_date:`2024-06-24`,license:`gemma`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`GraySwanAI/Llama-3-8B-Instruct-RR`,name:`Llama-3-8B-Instruct-RR`,created_by:`grayswanai`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/GraySwanAI/Llama-3-8B-Instruct-RR`,description:`Llama-3-8B-Instruct-RR is a Llama-3 model with circuit breakers inserted using Representation Rerouting (RR).`,release_date:`2024-07-08`,context_window:8192,tagline:`Llama-3-8B-Instruct-RR is a Llama-3 model with circuit breakers inserted using Representation Rerouting (RR).`,architecture:`transformer`,tools:[`function_calling`]},{id:`GritLM/GritLM-7B`,name:`GritLM-7B`,created_by:`gritlm`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},parameters:7.2,page_url:`https://huggingface.co/GritLM/GritLM-7B`,description:`> GritLM is a generative representational instruction tuned language model. 
It unifies text representation (embedding) and text generation into a single model achieving state-of-the-art performance on both types of tasks.`,release_date:`2024-02-11`,context_window:32768,license:`apache-2.0`,tagline:`> GritLM is a generative representational instruction tuned language model.`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`huihui-ai/Qwen2.5-72B-Instruct-abliterated`,name:`Qwen2.5-72B-Instruct-abliterated`,created_by:`huihui-ai`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:73,page_url:`https://huggingface.co/huihui-ai/Qwen2.5-72B-Instruct-abliterated`,description:`This is an uncensored version of [Qwen/Qwen2.5-72B-Instruct](https://huggingface.co/Qwen/Qwen2.5-72B-Instruct) created with abliteration (see [remove-refusals-with-transformers](https://github.com/Sumandora/remove-refusals-with-transformers) to know more about it). This is a crude, proof-of-concept implementation to remove refusals from an LLM model without using TransformerLens.`,release_date:`2024-10-26`,context_window:32768,license:`other`,tagline:`This is an uncensored version of [Qwen/Qwen2.5-72B-Instruct](https://huggingface.co/Qwen/Qwen2.5-72B-Instruct) created with abliteration (see [remove-refusals-with-transformers](https://github.com/Sumandora/remove-refusals-with-transformers) to know more about it).`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`instruction-pretrain/finance-Llama3-8B`,name:`finance-Llama3-8B`,created_by:`instruction-pretrain`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0},parameters:8,page_url:`https://huggingface.co/instruction-pretrain/finance-Llama3-8B`,description:`This repo contains the **finance model developed from Llama3-8B** in our paper [Instruction Pre-Training: Language Models are Supervised Multitask 
Learners](https://huggingface.co/papers/2406.14491).`,release_date:`2024-06-18`,context_window:8192,license:`llama3`,tagline:`This repo contains the **finance model developed from Llama3-8B** in our paper [Instruction Pre-Training: Language Models are Supervised Multitask Learners](https://huggingface.co/papers/2406.14491).`,architecture:`transformer`,open_weight:!0},{id:`Intel/neural-chat-7b-v3-3`,name:`neural-chat-7b-v3-3`,created_by:`intel`,source:`official`,last_updated:`2026-03-22`,capabilities:{streaming:!0,tool_call:!0},parameters:7.2,page_url:`https://huggingface.co/Intel/neural-chat-7b-v3-3`,release_date:`2023-12-09`,context_window:32768,license:`apache-2.0`},{id:`invalid-coder/Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp`,name:`Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp`,created_by:`invalid-coder`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},parameters:11,page_url:`https://huggingface.co/invalid-coder/Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp`,description:`NeuralPipe-7B-slerp is a merge of the following models using [LazyMergekit](https://colab.research.google.com/drive/1obulZ1ROXHjYLn6PPZJwRR6GzgQogxxb?usp=sharing): * [jeonsworld/CarbonVillain-en-10.7B-v2](https://huggingface.co/jeonsworld/CarbonVillain-en-10.7B-v2) * [kyujinpy/Sakura-SOLAR-Instruct](https://huggingface.co/kyujinpy/Sakura-SOLAR-Instruct)`,release_date:`2024-01-10`,context_window:4096,license:`apache-2.0`,tagline:`NeuralPipe-7B-slerp is a merge of the following models using [LazyMergekit](https://colab.research.google.com/drive/1obulZ1ROXHjYLn6PPZJwRR6GzgQogxxb?usp=sharing): * 
[jeonsworld/CarbonVillain-en-10.7B`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`m8than/Mistral-Nemo-Instruct-2407-lenient-chatfix`,name:`Mistral-Nemo-Instruct-2407-lenient-chatfix`,created_by:`m8than`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,page_url:`https://huggingface.co/m8than/Mistral-Nemo-Instruct-2407-lenient-chatfix`,description:`It's just mistral nemo 2407 with a less strict chat format.`,release_date:`2025-05-06`,context_window:131072,license:`apache-2.0`,parameters:12,tagline:`It's just mistral nemo 2407 with a less strict chat format.`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`meta-llama/Llama-2-13b-chat-hf`,name:`Llama-2-13b-chat-hf`,created_by:`meta`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-2`,model_type:`chat`,parameters:13,page_url:`https://huggingface.co/meta-llama/Llama-2-13b-chat-hf`,release_date:`2023-07-13`,license:`llama2`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],context_window:4096,max_output_tokens:4096,modalities:{input:[`text`],output:[`text`]}},{id:`meta-llama/Llama-3.1-70B-Instruct`,name:`Llama-3.1-70B-Instruct`,created_by:`meta`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3.1`,model_type:`chat`,parameters:71,page_url:`https://huggingface.co/meta-llama/Llama-3.1-70B-Instruct`,release_date:`2024-07-16`,license:`llama3.1`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],context_window:131072,max_output_tokens:131072,knowledge_cutoff:`2023-12`,modalities:{input:[`text`],output:[`text`]}},{id:`meta-llama/Llama-3.1-8B-Instruct`,name:`Llama-3.1-8B-Instruct`,created_by:`meta`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3.1`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/meta-llama/Llama-3.1-8B-
Instruct`,release_date:`2024-07-18`,license:`llama3.1`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],context_window:131072,max_output_tokens:131072,knowledge_cutoff:`2023-12`,modalities:{input:[`text`],output:[`text`]}},{id:`meta-llama/Llama-3.2-1B-Instruct`,name:`Llama-3.2-1B-Instruct`,created_by:`meta`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3.2`,model_type:`chat`,parameters:1.2,page_url:`https://huggingface.co/meta-llama/Llama-3.2-1B-Instruct`,release_date:`2024-09-18`,license:`llama3.2`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],context_window:131072,max_output_tokens:131072,knowledge_cutoff:`2023-12`,modalities:{input:[`text`],output:[`text`]}},{id:`meta-llama/Llama-3.3-70B-Instruct`,name:`Llama-3.3-70B-Instruct`,created_by:`meta`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3.3`,model_type:`chat`,parameters:71,page_url:`https://huggingface.co/meta-llama/Llama-3.3-70B-Instruct`,release_date:`2024-11-26`,license:`llama3.3`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],context_window:131072,max_output_tokens:131072,knowledge_cutoff:`2023-12`,modalities:{input:[`text`],output:[`text`]}},{id:`meta-llama/Meta-Llama-3-70B-Instruct`,name:`Meta-Llama-3-70B-Instruct`,created_by:`meta`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3`,model_type:`chat`,parameters:71,page_url:`https://huggingface.co/meta-llama/Meta-Llama-3-70B-Instruct`,release_date:`2024-04-17`,license:`llama3`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],context_window:8192,max_output_tokens:8192,modalities:{input:[`text`],output:[`text`]}},{id:`meta-llama/Meta-Llama-3-8B-Instruct`,name:`Meta-Llama-3-8B-Instruct`,created_by:`meta`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3`,model_type:`chat
`,parameters:8,page_url:`https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct`,release_date:`2024-04-17`,license:`llama3`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],context_window:8192,max_output_tokens:8192,modalities:{input:[`text`],output:[`text`]}},{id:`mistralai/Mistral-7B-Instruct-v0.2`,name:`Mistral-7B-Instruct-v0.2`,created_by:`mistral`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:7.2,page_url:`https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2`,release_date:`2023-12-11`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated`,name:`Meta-Llama-3.1-8B-Instruct-abliterated`,created_by:`mlabonne`,source:`official`,last_updated:`2026-03-23`,family:`Llama-3.1`,page_url:`https://huggingface.co/mlabonne/Meta-Llama-3.1-8B-Instruct-abliterated`,model_type:`chat`,capabilities:{streaming:!0,tool_call:!0},description:`This is an uncensored version of Llama 3.1 8B Instruct created with abliteration (see [this article](https://huggingface.co/blog/mlabonne/abliteration) to know more about it).`,release_date:`2024-07-24`,context_window:131072,license:`llama3.1`,parameters:8,tagline:`This is an uncensored version of Llama 3.1 8B Instruct created with abliteration (see [this article](https://huggingface.co/blog/mlabonne/abliteration) to know more about it).`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`mlabonne/NeuralDaredevil-8B-abliterated`,name:`NeuralDaredevil-8B-abliterated`,created_by:`mlabonne`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},parameters:8,page_url:`https://huggingface.co/mlabonne/NeuralDaredevil-8B-abliterated`,description:`This is a DPO fine-tune of [mlabonne/Daredevil-8-abliterated](https://huggingface.co/mlabonne/Daredevil-8B-abliterated), trained on one epoch of 
[mlabonne/orpo-dpo-mix-40k](https://huggingface.co/datasets/mlabonne/orpo-dpo-mix-40k). The DPO fine-tuning successfully recovers the performance loss due to the abliteration process, making it an excellent uncensored model.`,release_date:`2024-05-27`,context_window:8192,license:`llama3`,tagline:`This is a DPO fine-tune of [mlabonne/Daredevil-8-abliterated](https://huggingface.co/mlabonne/Daredevil-8B-abliterated), trained on one epoch of [mlabonne/orpo-dpo-mix-40k](https://huggingface.co/datasets/mlabonne/orpo-dpo-mix-40k).`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`moonshotai/Kimi-K2-Instruct-0905`,name:`Kimi-K2-Instruct-0905`,created_by:`moonshot`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Kimi-K2`,model_type:`chat`,page_url:`https://huggingface.co/moonshotai/Kimi-K2-Instruct-0905`,release_date:`2025-09-03`,context_window:262144,license:`other`,parameters:1026,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`moonshotai/Kimi-K2-Instruct`,name:`Kimi-K2-Instruct`,created_by:`moonshot`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Kimi-K2`,model_type:`chat`,page_url:`https://huggingface.co/moonshotai/Kimi-K2-Instruct`,release_date:`2025-07-11`,context_window:131072,license:`other`,parameters:1026,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`moonshotai/Kimi-K2-Thinking`,name:`Kimi-K2-Thinking`,created_by:`moonshot`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0,reasoning:!0},family:`Kimi-K2`,model_type:`chat`,page_url:`https://huggingface.co/moonshotai/Kimi-K2-Thinking`,release_date:`2025-11-04`,context_window:262144,license:`other`,parameters:1058,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],description:`Long-term thinking, multi-step tool usage, complex problem solving.`,tagline:`Long-term thinking, multi-step tool usage, 
complex problem solving.`},{id:`NousResearch/Meta-Llama-3-70B-Instruct`,name:`Meta-Llama-3-70B-Instruct`,created_by:`nousresearch`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3`,model_type:`chat`,parameters:71,page_url:`https://huggingface.co/NousResearch/Meta-Llama-3-70B-Instruct`,release_date:`2024-04-19`,context_window:8192,license:`other`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`NousResearch/Meta-Llama-3-8B-Instruct`,name:`Meta-Llama-3-8B-Instruct`,created_by:`nousresearch`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/NousResearch/Meta-Llama-3-8B-Instruct`,release_date:`2024-04-18`,context_window:8192,license:`other`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`NousResearch/Meta-Llama-3.1-8B-Instruct`,name:`Meta-Llama-3.1-8B-Instruct`,created_by:`nousresearch`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3.1`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/NousResearch/Meta-Llama-3.1-8B-Instruct`,release_date:`2024-07-24`,context_window:131072,license:`llama3.1`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`nvidia/Llama-3.1-8B-Instruct-FP8`,name:`Llama-3.1-8B-Instruct-FP8`,created_by:`nvidia`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3.1`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/nvidia/Llama-3.1-8B-Instruct-FP8`,release_date:`2024-08-29`,context_window:131072,license:`llama3.1`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`openai/gpt-oss-120b`,name:`gpt-oss-120b`,created_by:`openai`,source:`official`,last_updated:`2026-03-23`,family:`gpt-oss`,capabilities:{streaming:!0,tool_call:!0,structured_output:!0,reasoning:!0},model_
type:`chat`,parameters:120,page_url:`https://huggingface.co/openai/gpt-oss-120b`,description:`Welcome to the gpt-oss series, [OpenAI’s open-weight models](https://openai.com/open-models) designed for powerful reasoning, agentic tasks, and versatile developer use cases.`,release_date:`2025-08-04`,context_window:131072,license:`apache-2.0`,tagline:`Welcome to the gpt-oss series, [OpenAI’s open-weight models](https://openai.com/open-models) designed for powerful reasoning, agentic tasks, and versatile developer use cases.`,architecture:`moe`,open_weight:!0,tools:[`function_calling`],max_output_tokens:131072,knowledge_cutoff:`2024-05`,modalities:{input:[`text`],output:[`text`]}},{id:`openai/gpt-oss-20b`,name:`gpt-oss-20b`,created_by:`openai`,source:`official`,last_updated:`2026-03-23`,family:`gpt-oss`,capabilities:{streaming:!0,tool_call:!0,structured_output:!0,reasoning:!0},model_type:`chat`,parameters:22,page_url:`https://huggingface.co/openai/gpt-oss-20b`,description:`Welcome to the gpt-oss series, [OpenAI’s open-weight models](https://openai.com/open-models) designed for powerful reasoning, agentic tasks, and versatile developer use cases.`,release_date:`2025-08-04`,context_window:131072,license:`apache-2.0`,tagline:`Welcome to the gpt-oss series, [OpenAI’s open-weight models](https://openai.com/open-models) designed for powerful reasoning, agentic tasks, and versatile developer use cases.`,architecture:`moe`,open_weight:!0,tools:[`function_calling`],max_output_tokens:131072,knowledge_cutoff:`2024-05`,modalities:{input:[`text`],output:[`text`]}},{id:`openai/gpt-oss-safeguard-20b`,name:`gpt-oss-safeguard-20b`,created_by:`openai`,source:`official`,last_updated:`2026-03-23`,family:`gpt-oss`,capabilities:{streaming:!0,tool_call:!0},model_type:`moderation`,parameters:20,page_url:`https://huggingface.co/openai/gpt-oss-safeguard-20b`,description:"`gpt-oss-safeguard-120b` and `gpt-oss-safeguard-20b` are safety reasoning models built-upon gpt-oss. 
With these models, you can classify text content based on safety policies that you provide and perform a suite of foundational safety tasks. These models are intended for safety use cases. For other applications, we recommend using [gpt-oss models](https://huggingface.co/collections/openai/gpt-oss).",release_date:`2025-09-18`,context_window:131072,license:`apache-2.0`,tagline:"`gpt-oss-safeguard-120b` and `gpt-oss-safeguard-20b` are safety reasoning models built-upon gpt-oss.",architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`openchat/openchat-3.6-8b-20240522`,name:`openchat-3.6-8b-20240522`,created_by:`openchat`,source:`official`,last_updated:`2026-03-23`,alias:`openchat/openchat-3.6-8b`,capabilities:{streaming:!0,tool_call:!0},parameters:8,page_url:`https://huggingface.co/openchat/openchat-3.6-8b-20240522`,description:`To use this model, we highly recommend installing the OpenChat package by following the [installation guide](https://github.com/imoneoi/openchat#installation) in our repository and using the OpenChat OpenAI-compatible API server by running the serving command from the table below. 
The server is optimized for high-throughput deployment using [vLLM](https://github.com/vllm-project/vllm) and can run on a consumer GPU with 24GB RAM.`,release_date:`2024-05-07`,context_window:8192,license:`llama3`,tagline:`To use this model, we highly recommend installing the OpenChat package by following the [installation guide](https://github.com/imoneoi/openchat#installation) in our repository and using the OpenChat OpenAI-compatible API server by running the serving command from the table below.`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`openchat/openchat-3.6-8b`,name:`openchat-3.6-8b-20240522`,created_by:`openchat`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},snapshots:[`openchat/openchat-3.6-8b-20240522`],parameters:8,page_url:`https://huggingface.co/openchat/openchat-3.6-8b-20240522`,description:`To use this model, we highly recommend installing the OpenChat package by following the [installation guide](https://github.com/imoneoi/openchat#installation) in our repository and using the OpenChat OpenAI-compatible API server by running the serving command from the table below. 
The server is optimized for high-throughput deployment using [vLLM](https://github.com/vllm-project/vllm) and can run on a consumer GPU with 24GB RAM.`,release_date:`2024-05-07`,context_window:8192,license:`llama3`,tagline:`To use this model, we highly recommend installing the OpenChat package by following the [installation guide](https://github.com/imoneoi/openchat#installation) in our repository and using the OpenChat OpenAI-compatible API server by running the serving command from the table below.`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Orion-zhen/Qwen2.5-7B-Instruct-Uncensored`,name:`Qwen2.5-7B-Instruct-Uncensored`,created_by:`orion-zhen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:7.6,page_url:`https://huggingface.co/Orion-zhen/Qwen2.5-7B-Instruct-Uncensored`,description:`This model is an uncensored fine-tune version of Qwen2.5-7B-Instruct. However, I can still notice that though uncensored, the model fails to generate detailed descriptions on certain extreme scenarios, which might be associated with deletion on some pretrain datasets in Qwen's pretraining stage.`,release_date:`2024-09-26`,context_window:32768,license:`gpl-3.0`,tagline:`This model is an uncensored fine-tune version of Qwen2.5-7B-Instruct.`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`PatronusAI/Llama-3-Patronus-Lynx-8B-Instruct-v1.1`,name:`Llama-3-Patronus-Lynx-8B-Instruct-v1.1`,created_by:`patronusai`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/PatronusAI/Llama-3-Patronus-Lynx-8B-Instruct-v1.1`,description:`Lynx is an open-source hallucination evaluation model. Patronus-Lynx-8B-Instruct-v1.1 was trained on a mix of datasets including CovidQA, PubmedQA, DROP, RAGTruth. The datasets contain a mix of hand-annotated and synthetic data. 
The maximum sequence length is 128000 tokens.`,release_date:`2024-07-24`,context_window:131072,license:`cc-by-nc-4.0`,tagline:`Lynx is an open-source hallucination evaluation model.`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen1.5-1.8B-Chat`,name:`Qwen1.5-1.8B-Chat`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:1.8,page_url:`https://huggingface.co/Qwen/Qwen1.5-1.8B-Chat`,release_date:`2024-01-30`,context_window:32768,license:`other`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2-1.5B-Instruct`,name:`Qwen2-1.5B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:1.5,page_url:`https://huggingface.co/Qwen/Qwen2-1.5B-Instruct`,release_date:`2024-06-03`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2-72B-Instruct`,name:`Qwen2-72B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:73,page_url:`https://huggingface.co/Qwen/Qwen2-72B-Instruct`,release_date:`2024-05-28`,context_window:32768,license:`other`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],max_output_tokens:6144,modalities:{input:[`text`],output:[`text`]}},{id:`Qwen/Qwen2-7B-Instruct`,name:`Qwen2-7B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:7.6,page_url:`https://huggingface.co/Qwen/Qwen2-7B-Instruct`,release_date:`2024-06-04`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],modalities:{input:[`text`],output:[`text`]}},{id:`Qwen/Qwen2.5-1.5B-Instruct`,name:`Qwen2.5-1.5B-Instruct`,created_by:`qwen`,source:`official`,
last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:1.5,page_url:`https://huggingface.co/Qwen/Qwen2.5-1.5B-Instruct`,release_date:`2024-09-17`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2.5-14B-Instruct-1M`,name:`Qwen2.5-14B-Instruct-1M`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:15,page_url:`https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M`,release_date:`2025-01-23`,context_window:101e4,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],max_output_tokens:8192,modalities:{input:[`text`],output:[`text`]}},{id:`Qwen/Qwen2.5-14B-Instruct`,name:`Qwen2.5-14B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:14,page_url:`https://huggingface.co/Qwen/Qwen2.5-14B-Instruct`,release_date:`2024-09-16`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],modalities:{input:[`text`],output:[`text`]}},{id:`Qwen/Qwen2.5-32B-Instruct`,name:`Qwen2.5-32B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:32,page_url:`https://huggingface.co/Qwen/Qwen2.5-32B-Instruct`,release_date:`2024-09-17`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],modalities:{input:[`text`],output:[`text`]}},{id:`Qwen/Qwen2.5-72B-Instruct`,name:`Qwen2.5-72B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:73,page_url:`https://huggingface.co/Qwen/Qwen2.5-72B-Instruct`,release_date:`2024-09-16`,context_window:32768,license:`other`,architecture:`transformer`,open_weight:!
0,tools:[`function_calling`],modalities:{input:[`text`],output:[`text`]}},{id:`Qwen/Qwen2.5-7B-Instruct-1M`,name:`Qwen2.5-7B-Instruct-1M`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:7.6,page_url:`https://huggingface.co/Qwen/Qwen2.5-7B-Instruct-1M`,release_date:`2025-01-23`,context_window:101e4,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],modalities:{input:[`text`],output:[`text`]}},{id:`Qwen/Qwen2.5-7B-Instruct`,name:`Qwen2.5-7B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:7.6,page_url:`https://huggingface.co/Qwen/Qwen2.5-7B-Instruct`,release_date:`2024-09-16`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],modalities:{input:[`text`],output:[`text`]}},{id:`Qwen/Qwen2.5-Coder-1.5B-Instruct`,name:`Qwen2.5-Coder-1.5B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:1.5,page_url:`https://huggingface.co/Qwen/Qwen2.5-Coder-1.5B-Instruct`,release_date:`2024-09-18`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2.5-Coder-1.5B`,name:`Qwen2.5-Coder-1.5B`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:1.5,page_url:`https://huggingface.co/Qwen/Qwen2.5-Coder-1.5B`,release_date:`2024-09-18`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2.5-Coder-14B-Instruct`,name:`Qwen2.5-Coder-14B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:15,page_url:`https://huggingface.co/Qwen
/Qwen2.5-Coder-14B-Instruct`,release_date:`2024-11-06`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2.5-Coder-14B`,name:`Qwen2.5-Coder-14B`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:15,page_url:`https://huggingface.co/Qwen/Qwen2.5-Coder-14B`,release_date:`2024-11-08`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2.5-Coder-32B-Instruct`,name:`Qwen2.5-Coder-32B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:33,page_url:`https://huggingface.co/Qwen/Qwen2.5-Coder-32B-Instruct`,release_date:`2024-11-06`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2.5-Coder-32B`,name:`Qwen2.5-Coder-32B`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:33,page_url:`https://huggingface.co/Qwen/Qwen2.5-Coder-32B`,release_date:`2024-11-08`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2.5-Coder-3B-Instruct`,name:`Qwen2.5-Coder-3B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:3.1,page_url:`https://huggingface.co/Qwen/Qwen2.5-Coder-3B-Instruct`,release_date:`2024-11-06`,context_window:32768,license:`other`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2.5-Coder-7B-Instruct`,name:`Qwen2.5-Coder-7B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:7.6,page_url:`https://huggingface.co/Qwen/Qwen2.
5-Coder-7B-Instruct`,release_date:`2024-09-17`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2.5-Coder-7B`,name:`Qwen2.5-Coder-7B`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:7.6,page_url:`https://huggingface.co/Qwen/Qwen2.5-Coder-7B`,release_date:`2024-09-16`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2.5-Math-1.5B-Instruct`,name:`Qwen2.5-Math-1.5B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:1.5,page_url:`https://huggingface.co/Qwen/Qwen2.5-Math-1.5B-Instruct`,description:`> [!Warning] > <div align="center"> > <b> > 🚨 Qwen2.5-Math mainly supports solving English and Chinese math problems through CoT and TIR. We do not recommend using this series of models for other tasks. > </b> > </div>`,release_date:`2024-09-16`,context_window:4096,license:`apache-2.0`,tagline:`> [!Warning] > <div align="center"> > <b> > 🚨 Qwen2.5-Math mainly supports solving English and Chinese math problems through CoT and TIR.`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen2.5-Math-7B-Instruct`,name:`Qwen2.5-Math-7B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:7.6,page_url:`https://huggingface.co/Qwen/Qwen2.5-Math-7B-Instruct`,description:`> [!Warning] > <div align="center"> > <b> > 🚨 Qwen2.5-Math mainly supports solving English and Chinese math problems through CoT and TIR. We do not recommend using this series of models for other tasks. 
> </b> > </div>`,release_date:`2024-09-19`,context_window:4096,license:`apache-2.0`,tagline:`> [!Warning] > <div align="center"> > <b> > 🚨 Qwen2.5-Math mainly supports solving English and Chinese math problems through CoT and TIR.`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`],modalities:{input:[`text`],output:[`text`]}},{id:`Qwen/Qwen3-1.7B`,name:`Qwen3-1.7B`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:2,page_url:`https://huggingface.co/Qwen/Qwen3-1.7B`,release_date:`2025-04-27`,context_window:40960,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-14B-Base`,name:`Qwen3-14B-Base`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:15,page_url:`https://huggingface.co/Qwen/Qwen3-14B-Base`,release_date:`2025-04-28`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-14B`,name:`Qwen3-14B`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:14,page_url:`https://huggingface.co/Qwen/Qwen3-14B`,release_date:`2025-04-27`,context_window:40960,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-235B-A22B-Instruct-2507`,name:`Qwen3-235B-A22B-Instruct-2507`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,alias:`Qwen/Qwen3-235B-A22B-Instruct`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:235,active_parameters:22,page_url:`https://huggingface.co/Qwen/Qwen3-235B-A22B-Instruct-2507`,release_date:`2025-07-21`,context_window:262144,license:`apache-2.0`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-235B-A22B-Instruct`,name:`Qwen3-235B-A22B-Instruct-2507`,created_by:
`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},snapshots:[`Qwen/Qwen3-235B-A22B-Instruct-2507`],model_type:`chat`,parameters:235,active_parameters:22,page_url:`https://huggingface.co/Qwen/Qwen3-235B-A22B-Instruct-2507`,release_date:`2025-07-21`,context_window:262144,license:`apache-2.0`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-235B-A22B-Thinking-2507`,name:`Qwen3-235B-A22B-Thinking-2507`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,alias:`Qwen/Qwen3-235B-A22B-Thinking`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:235,active_parameters:22,page_url:`https://huggingface.co/Qwen/Qwen3-235B-A22B-Thinking-2507`,release_date:`2025-07-25`,context_window:262144,license:`apache-2.0`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-235B-A22B-Thinking`,name:`Qwen3-235B-A22B-Thinking-2507`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},snapshots:[`Qwen/Qwen3-235B-A22B-Thinking-2507`],model_type:`chat`,parameters:235,active_parameters:22,page_url:`https://huggingface.co/Qwen/Qwen3-235B-A22B-Thinking-2507`,release_date:`2025-07-25`,context_window:262144,license:`apache-2.0`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-235B-A22B`,name:`Qwen3-235B-A22B`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:235,active_parameters:15,page_url:`https://huggingface.co/Qwen/Qwen3-235B-A22B`,release_date:`2025-04-27`,context_window:40960,license:`apache-2.0`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-30B-A3B-Instruct-2507`,name:`Qwen3-30B-A3B-Instruct-2507`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,alias:`Qwen/Qwen3-30B-A3B-Instruct`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:30,active_parameter
s:3,page_url:`https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507`,release_date:`2025-07-28`,context_window:262144,license:`apache-2.0`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-30B-A3B-Instruct`,name:`Qwen3-30B-A3B-Instruct-2507`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},snapshots:[`Qwen/Qwen3-30B-A3B-Instruct-2507`],model_type:`chat`,parameters:30,active_parameters:3,page_url:`https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507`,release_date:`2025-07-28`,context_window:262144,license:`apache-2.0`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-30B-A3B`,name:`Qwen3-30B-A3B`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:30,active_parameters:3,page_url:`https://huggingface.co/Qwen/Qwen3-30B-A3B`,release_date:`2025-04-27`,context_window:40960,license:`apache-2.0`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-32B`,name:`Qwen3-32B`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:32,page_url:`https://huggingface.co/Qwen/Qwen3-32B`,release_date:`2025-04-27`,context_window:40960,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-4B-Instruct-2507`,name:`Qwen3-4B-Instruct-2507`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,alias:`Qwen/Qwen3-4B-Instruct`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:4,page_url:`https://huggingface.co/Qwen/Qwen3-4B-Instruct-2507`,release_date:`2025-08-05`,context_window:262144,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-4B-Instruct`,name:`Qwen3-4B-Instruct-2507`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_ca
ll:!0},snapshots:[`Qwen/Qwen3-4B-Instruct-2507`],model_type:`chat`,parameters:4,page_url:`https://huggingface.co/Qwen/Qwen3-4B-Instruct-2507`,release_date:`2025-08-05`,context_window:262144,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-4B-Thinking-2507`,name:`Qwen3-4B-Thinking-2507`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,alias:`Qwen/Qwen3-4B-Thinking`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:4,page_url:`https://huggingface.co/Qwen/Qwen3-4B-Thinking-2507`,release_date:`2025-08-05`,context_window:262144,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-4B-Thinking`,name:`Qwen3-4B-Thinking-2507`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},snapshots:[`Qwen/Qwen3-4B-Thinking-2507`],model_type:`chat`,parameters:4,page_url:`https://huggingface.co/Qwen/Qwen3-4B-Thinking-2507`,release_date:`2025-08-05`,context_window:262144,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-8B-Base`,name:`Qwen3-8B-Base`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:8.2,page_url:`https://huggingface.co/Qwen/Qwen3-8B-Base`,release_date:`2025-04-28`,context_window:32768,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-8B`,name:`Qwen3-8B`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:8.2,page_url:`https://huggingface.co/Qwen/Qwen3-8B`,release_date:`2025-04-27`,context_window:40960,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-Coder-30B-A3B-Instruct`,name:`Qwen3-Coder-30B-A3B-Instruct`,created_by:`qwen`,source:`official`,last_updat
ed:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:31,active_parameters:1.9,page_url:`https://huggingface.co/Qwen/Qwen3-Coder-30B-A3B-Instruct`,release_date:`2025-07-31`,context_window:262144,license:`apache-2.0`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8`,name:`Qwen3-Coder-480B-A35B-Instruct-FP8`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:480,active_parameters:35,release_date:`2025-07-22`,context_window:262144,license:`apache-2.0`,page_url:`https://huggingface.co/Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-Coder-480B-A35B-Instruct`,name:`Qwen3-Coder-480B-A35B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:480,active_parameters:24,page_url:`https://huggingface.co/Qwen/Qwen3-Coder-480B-A35B-Instruct`,release_date:`2025-07-22`,context_window:262144,license:`apache-2.0`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-Coder-Next-FP8`,name:`Qwen3-Coder-Next-FP8`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,page_url:`https://huggingface.co/Qwen/Qwen3-Coder-Next-FP8`,release_date:`2026-02-01`,context_window:262144,license:`apache-2.0`,parameters:80,active_parameters:1.6,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-Coder-Next`,name:`Qwen3-Coder-Next`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,release_date:`2026-01-30`,context_window:262144,license:`apache-2.0`,parameters:80,active_parameters:1.6,page_url:`https://huggingface.co/Qwen/Qwen3-Coder-Next`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]
,max_output_tokens:65536,modalities:{input:[`text`],output:[`text`]}},{id:`Qwen/Qwen3-Next-80B-A3B-Instruct`,name:`Qwen3-Next-80B-A3B-Instruct`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:81,active_parameters:1.6,page_url:`https://huggingface.co/Qwen/Qwen3-Next-80B-A3B-Instruct`,description:`Over the past few months, we have observed increasingly clear trends toward scaling both total parameters and context lengths in the pursuit of more powerful and agentic artificial intelligence (AI). We are excited to share our latest advancements in addressing these demands, centered on improving scaling efficiency through innovative model architecture. We call this next-generation foundation models **Qwen3-Next**.`,release_date:`2025-09-09`,context_window:262144,license:`apache-2.0`,tagline:`Over the past few months, we have observed increasingly clear trends toward scaling both total parameters and context lengths in the pursuit of more powerful and agentic artificial intelligence (AI).`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`Qwen/Qwen3-Next-80B-A3B-Thinking`,name:`Qwen3-Next-80B-A3B-Thinking`,created_by:`qwen`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:80,active_parameters:3,page_url:`https://huggingface.co/Qwen/Qwen3-Next-80B-A3B-Thinking`,description:`Over the past few months, we have observed increasingly clear trends toward scaling both total parameters and context lengths in the pursuit of more powerful and agentic artificial intelligence (AI). We are excited to share our latest advancements in addressing these demands, centered on improving scaling efficiency through innovative model architecture. 
We call this next-generation foundation models **Qwen3-Next**.`,release_date:`2025-09-09`,context_window:262144,license:`apache-2.0`,tagline:`Over the past few months, we have observed increasingly clear trends toward scaling both total parameters and context lengths in the pursuit of more powerful and agentic artificial intelligence (AI).`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`RedHatAI/Meta-Llama-3-8B-Instruct-FP8-KV`,name:`Meta-Llama-3-8B-Instruct-FP8-KV`,created_by:`redhatai`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/RedHatAI/Meta-Llama-3-8B-Instruct-FP8-KV`,release_date:`2024-05-20`,context_window:8192,architecture:`transformer`,tools:[`function_calling`]},{id:`SakanaAI/TinySwallow-1.5B-Instruct`,name:`TinySwallow-1.5B-Instruct`,created_by:`sakanaai`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},parameters:1.5,page_url:`https://huggingface.co/SakanaAI/TinySwallow-1.5B-Instruct`,description:`🤗 [Models](https://huggingface.co/SakanaAI) | 📚 [Paper](https://arxiv.org/abs/2501.16937) | 📝 [Blog](https://sakana.ai/taid-jp/) | 🐦 [Twitter](https://twitter.com/SakanaAILabs)`,release_date:`2025-01-07`,context_window:32768,license:`apache-2.0`,tagline:`🤗 [Models](https://huggingface.co/SakanaAI) | 📚 [Paper](https://arxiv.org/abs/2501.16937) | 📝 [Blog](https://sakana.ai/taid-jp/) | 🐦 
[Twitter](https://twitter.com/SakanaAILabs)`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`speakleash/Bielik-7B-Instruct-v0.1`,name:`Bielik-7B-Instruct-v0.1`,created_by:`speakleash`,source:`official`,last_updated:`2026-03-21`,capabilities:{streaming:!0},parameters:7},{id:`swap-uniba/LLaMAntino-3-ANITA-8B-Inst-DPO-ITA`,name:`LLaMAntino-3-ANITA-8B-Inst-DPO-ITA`,created_by:`swap-uniba`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:8,page_url:`https://huggingface.co/swap-uniba/LLaMAntino-3-ANITA-8B-Inst-DPO-ITA`,description:`📣 New MODEL FAMILY❗ [https://huggingface.co/m-polignano/ANITA-NEXT-24B-Magistral-2506-VISION-ITA](https://huggingface.co/m-polignano/ANITA-NEXT-24B-Magistral-2506-VISION-ITA)`,release_date:`2024-04-29`,context_window:8192,license:`llama3`,tagline:`📣 New MODEL FAMILY❗ [https://huggingface.co/m-polignano/ANITA-NEXT-24B-Magistral-2506-VISION-ITA](https://huggingface.co/m-polignano/ANITA-NEXT-24B-Magistral-2506-VISION-ITA)`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`swiss-ai/Apertus-8B-Instruct-2509`,name:`Apertus-8B-Instruct-2509`,created_by:`swiss-ai`,source:`official`,last_updated:`2026-03-23`,alias:`swiss-ai/Apertus-8B-Instruct`,capabilities:{streaming:!0,tool_call:!0},parameters:8,page_url:`https://huggingface.co/swiss-ai/Apertus-8B-Instruct-2509`,release_date:`2025-08-13`,context_window:65536,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`swiss-ai/Apertus-8B-Instruct`,name:`Apertus-8B-Instruct-2509`,created_by:`swiss-ai`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},snapshots:[`swiss-ai/Apertus-8B-Instruct-2509`],parameters:8,page_url:`https://huggingface.co/swiss-ai/Apertus-8B-Instruct-2509`,release_date:`2025-08-13`,context_window:65536,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`t
okyotech-llm/Llama-3-Swallow-8B-Instruct-v0.1`,name:`Llama-3-Swallow-8B-Instruct-v0.1`,created_by:`tokyotech-llm`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/tokyotech-llm/Llama-3-Swallow-8B-Instruct-v0.1`,description:`Our Swallow model has undergone continual pre-training from the [Llama 3 family](https://huggingface.co/collections/meta-llama/meta-llama-3-66214712577ca38149ebb2b6), primarily with the addition of Japanese language data. The Instruct versions use supervised fine-tuning (SFT) and Chat Vector. Links to other models can be found in the index.`,release_date:`2024-06-26`,context_window:8192,license:`llama3`,tagline:`Our Swallow model has undergone continual pre-training from the [Llama 3 family](https://huggingface.co/collections/meta-llama/meta-llama-3-66214712577ca38149ebb2b6), primarily with the addition of Japanese language data.`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`tokyotech-llm/Llama-3.1-Swallow-8B-Instruct-v0.5`,name:`Llama-3.1-Swallow-8B-Instruct-v0.5`,created_by:`tokyotech-llm`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3.1`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/tokyotech-llm/Llama-3.1-Swallow-8B-Instruct-v0.5`,description:`Llama 3.1 Swallow is a series of large language models (8B, 70B) that were built by continual pre-training on the [Meta Llama 3.1](https://huggingface.co/collections/meta-llama/llama-31-669fc079a0c406a149a5738f) models. 
Llama 3.1 Swallow enhanced the Japanese language capabilities of the original Llama 3.1 while retaining the English language capabilities.`,release_date:`2025-06-12`,context_window:131072,license:`llama3.3`,tagline:`Llama 3.1 Swallow is a series of large language models (8B, 70B) that were built by continual pre-training on the [Meta Llama 3.1](https://huggingface.co/collections/meta-llama/llama-31-669fc079a0c406a149a5738f) models.`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`unsloth/gemma-2-9b-it`,name:`gemma-2-9b-it`,created_by:`unsloth`,source:`official`,last_updated:`2026-03-23`,family:`gemma-2`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:9.2,page_url:`https://huggingface.co/unsloth/gemma-2-9b-it`,release_date:`2024-07-03`,context_window:8192,license:`gemma`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`unsloth/llama-3-8b-Instruct`,name:`llama-3-8b-Instruct`,created_by:`unsloth`,source:`official`,last_updated:`2026-03-23`,family:`llama-3`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:8,page_url:`https://huggingface.co/unsloth/llama-3-8b-Instruct`,description:"Directly quantized 4bit model with `bitsandbytes`.",release_date:`2024-04-18`,context_window:8192,license:`llama3`,tagline:"Directly quantized 4bit model with 
`bitsandbytes`.",architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`unsloth/Llama-3.1-8B-Instruct`,name:`Llama-3.1-8B-Instruct`,created_by:`unsloth`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3.1`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/unsloth/Llama-3.1-8B-Instruct`,release_date:`2025-02-15`,context_window:131072,license:`llama3.1`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`unsloth/Meta-Llama-3.1-8B-Instruct`,name:`Meta-Llama-3.1-8B-Instruct`,created_by:`unsloth`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3.1`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/unsloth/Meta-Llama-3.1-8B-Instruct`,description:`We have a free Google Colab Tesla T4 notebook for Llama 3.1 (8B) here: https://colab.research.google.com/drive/1Ys44kVvmeZtnICzWz0xgpRnrIOjZAuxp?usp=sharing`,release_date:`2024-07-23`,context_window:131072,license:`llama3.1`,tagline:`We have a free Google Colab Tesla T4 notebook for Llama 3.1 (8B) here: https://colab.research.google.com/drive/1Ys44kVvmeZtnICzWz0xgpRnrIOjZAuxp?usp=sharing`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`unsloth/Mistral-Small-24B-Instruct-2501`,name:`Mistral-Small-24B-Instruct-2501`,created_by:`unsloth`,source:`official`,last_updated:`2026-03-23`,alias:`unsloth/Mistral-Small-24B-Instruct`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:24,page_url:`https://huggingface.co/unsloth/Mistral-Small-24B-Instruct-2501`,description:`We have a free Google Colab Tesla T4 notebook for Mistral (7B) here: https://colab.research.google.com/github/unslothai/notebooks/blob/main/nb/Mistral_v0.3_(7B)-Conversational.ipynb`,release_date:`2025-01-30`,context_window:32768,license:`apache-2.0`,tagline:`We have a free Google Colab Tesla T4 notebook for Mistral (7B) here: 
https://colab.research.google.com/github/unslothai/notebooks/blob/main/nb/Mistral_v0.3_(7B)-Conversational.ipynb`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`unsloth/Mistral-Small-24B-Instruct`,name:`Mistral-Small-24B-Instruct-2501`,created_by:`unsloth`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},snapshots:[`unsloth/Mistral-Small-24B-Instruct-2501`],model_type:`chat`,parameters:24,page_url:`https://huggingface.co/unsloth/Mistral-Small-24B-Instruct-2501`,description:`We have a free Google Colab Tesla T4 notebook for Mistral (7B) here: https://colab.research.google.com/github/unslothai/notebooks/blob/main/nb/Mistral_v0.3_(7B)-Conversational.ipynb`,release_date:`2025-01-30`,context_window:32768,license:`apache-2.0`,tagline:`We have a free Google Colab Tesla T4 notebook for Mistral (7B) here: https://colab.research.google.com/github/unslothai/notebooks/blob/main/nb/Mistral_v0.3_(7B)-Conversational.ipynb`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`unsloth/Qwen2.5-14B-Instruct`,name:`Qwen2.5-14B-Instruct`,created_by:`unsloth`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:15,page_url:`https://huggingface.co/unsloth/Qwen2.5-14B-Instruct`,description:`We have a Qwen 2.5 (all model sizes) [free Google Colab Tesla T4 notebook](https://colab.research.google.com/drive/1Kose-ucXO1IBaZq5BvbwWieuubP7hxvQ?usp=sharing). 
Also a [Qwen 2.5 conversational style notebook](https://colab.research.google.com/drive/1qN1CEalC70EO1wGKhNxs1go1W9So61R5?usp=sharing).`,release_date:`2024-09-23`,context_window:32768,license:`apache-2.0`,tagline:`We have a Qwen 2.5 (all model sizes) [free Google Colab Tesla T4 notebook](https://colab.research.google.com/drive/1Kose-ucXO1IBaZq5BvbwWieuubP7hxvQ?usp=sharing).`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`unsloth/Qwen2.5-32B-Instruct`,name:`Qwen2.5-32B-Instruct`,created_by:`unsloth`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:33,page_url:`https://huggingface.co/unsloth/Qwen2.5-32B-Instruct`,description:`We have a Qwen 2.5 (all model sizes) [free Google Colab Tesla T4 notebook](https://colab.research.google.com/drive/1Kose-ucXO1IBaZq5BvbwWieuubP7hxvQ?usp=sharing). Also a [Qwen 2.5 conversational style notebook](https://colab.research.google.com/drive/1qN1CEalC70EO1wGKhNxs1go1W9So61R5?usp=sharing).`,release_date:`2024-09-23`,context_window:32768,license:`apache-2.0`,tagline:`We have a Qwen 2.5 (all model sizes) [free Google Colab Tesla T4 notebook](https://colab.research.google.com/drive/1Kose-ucXO1IBaZq5BvbwWieuubP7hxvQ?usp=sharing).`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`unsloth/Qwen2.5-7B-Instruct`,name:`Qwen2.5-7B-Instruct`,created_by:`unsloth`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:7.6,page_url:`https://huggingface.co/unsloth/Qwen2.5-7B-Instruct`,description:`We have a Qwen 2.5 (all model sizes) [free Google Colab Tesla T4 notebook](https://colab.research.google.com/drive/1Kose-ucXO1IBaZq5BvbwWieuubP7hxvQ?usp=sharing). 
Also a [Qwen 2.5 conversational style notebook](https://colab.research.google.com/drive/1qN1CEalC70EO1wGKhNxs1go1W9So61R5?usp=sharing).`,release_date:`2024-09-18`,context_window:32768,license:`apache-2.0`,tagline:`We have a Qwen 2.5 (all model sizes) [free Google Colab Tesla T4 notebook](https://colab.research.google.com/drive/1Kose-ucXO1IBaZq5BvbwWieuubP7hxvQ?usp=sharing).`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`unsloth/Qwen2.5-Coder-32B-Instruct`,name:`Qwen2.5-Coder-32B-Instruct`,created_by:`unsloth`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`code`,parameters:33,page_url:`https://huggingface.co/unsloth/Qwen2.5-Coder-32B-Instruct`,description:`We have a Qwen 2.5 (all model sizes) [free Google Colab Tesla T4 notebook](https://colab.research.google.com/drive/1Kose-ucXO1IBaZq5BvbwWieuubP7hxvQ?usp=sharing). Also a [Qwen 2.5 conversational style notebook](https://colab.research.google.com/drive/1qN1CEalC70EO1wGKhNxs1go1W9So61R5?usp=sharing).`,release_date:`2024-11-12`,context_window:32768,license:`apache-2.0`,tagline:`We have a Qwen 2.5 (all model sizes) [free Google Colab Tesla T4 notebook](https://colab.research.google.com/drive/1Kose-ucXO1IBaZq5BvbwWieuubP7hxvQ?usp=sharing).`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`unsloth/Qwen3-8B`,name:`Qwen3-8B`,created_by:`unsloth`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},model_type:`chat`,parameters:8,page_url:`https://huggingface.co/unsloth/Qwen3-8B`,release_date:`2025-04-28`,context_window:40960,license:`apache-2.0`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`unsloth/tinyllama-chat`,name:`tinyllama-chat`,created_by:`unsloth`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},page_url:`https://huggingface.co/unsloth/tinyllama-chat`,description:`[<img 
src="https://raw.githubusercontent.com/unslothai/unsloth/main/images/Discord%20button.png" width="200"/>](https://discord.gg/u54VK8m8tk) [<img src="https://raw.githubusercontent.com/unslothai/unsloth/main/images/buy%20me%20a%20coffee%20button.png" width="200"/>](https://ko-fi.com/unsloth) [<img src="https://raw.githubusercontent.com/unslothai/unsloth/main/images/unsloth%20made%20with%20love.png" width="200"/>](https://github.com/unslothai/unsloth)`,release_date:`2024-02-14`,context_window:2048,license:`apache-2.0`,parameters:1.1,tagline:`[<img src="https://raw.githubusercontent.com/unslothai/unsloth/main/images/Discord%20button.png" width="200"/>](https://discord.gg/u54VK8m8tk) [<img src="https://raw.githubusercontent.com/unslothai/un`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct`,name:`Llama-3-SauerkrautLM-8b-Instruct`,created_by:`vagosolutions`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`Llama-3`,model_type:`chat`,parameters:8,page_url:`https://huggingface.co/VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct`,release_date:`2024-04-19`,context_window:8192,license:`other`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`zai-org/GLM-4-9B-0414`,name:`GLM-4-9B-0414`,created_by:`zhipu`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`GLM-4`,model_type:`chat`,parameters:9.4,page_url:`https://huggingface.co/zai-org/GLM-4-9B-0414`,release_date:`2025-04-07`,context_window:32768,license:`mit`,architecture:`transformer`,open_weight:!0,tools:[`function_calling`]},{id:`zai-org/GLM-4.5-Air-FP8`,name:`GLM-4.5-Air-FP8`,created_by:`zhipu`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`GLM-4.5`,model_type:`chat`,page_url:`https://huggingface.co/zai-org/GLM-4.5-Air-FP8`,description:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" 
target="_blank">Discord</a> community.`,release_date:`2025-07-20`,context_window:131072,license:`mit`,tagline:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`zai-org/GLM-4.5-Air`,name:`GLM-4.5-Air`,created_by:`zhipu`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`GLM-4.5`,model_type:`chat`,page_url:`https://huggingface.co/zai-org/GLM-4.5-Air`,description:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,release_date:`2025-07-20`,context_window:131072,license:`mit`,parameters:110,tagline:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`zai-org/GLM-4.5`,name:`GLM-4.5`,created_by:`zhipu`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`GLM-4.5`,model_type:`chat`,page_url:`https://huggingface.co/zai-org/GLM-4.5`,description:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,release_date:`2025-07-20`,context_window:131072,license:`mit`,parameters:358,tagline:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`zai-org/GLM-4.6-FP8`,name:`GLM-4.6-FP8`,created_by:`zhipu`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`GLM-4.6`,model_type:`chat`,page_url:`https://huggingface.co/zai-org/GLM-4.6-FP8`,description:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,release_date:`2025-09-29`,context_window:202752,license:`mit`,tagline:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> 
community.`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`zai-org/GLM-4.6`,name:`GLM-4.6`,created_by:`zhipu`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`GLM-4.6`,model_type:`chat`,page_url:`https://huggingface.co/zai-org/GLM-4.6`,description:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,release_date:`2025-09-29`,context_window:202752,license:`mit`,parameters:357,tagline:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`zai-org/GLM-4.7-Flash`,name:`GLM-4.7-Flash`,created_by:`zhipu`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`GLM-4.7`,model_type:`chat`,page_url:`https://huggingface.co/zai-org/GLM-4.7-Flash`,description:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,release_date:`2026-01-19`,context_window:202752,license:`mit`,parameters:31,tagline:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`zai-org/GLM-4.7-FP8`,name:`GLM-4.7-FP8`,created_by:`zhipu`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`GLM-4.7`,model_type:`chat`,page_url:`https://huggingface.co/zai-org/GLM-4.7-FP8`,description:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,release_date:`2025-12-22`,context_window:202752,license:`mit`,tagline:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> 
community.`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`zai-org/GLM-4.7`,name:`GLM-4.7`,created_by:`zhipu`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`GLM-4.7`,model_type:`chat`,page_url:`https://huggingface.co/zai-org/GLM-4.7`,description:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,release_date:`2025-12-22`,context_window:202752,license:`mit`,parameters:358,tagline:`👋 Join our <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`zai-org/GLM-5`,name:`GLM-5`,created_by:`zhipu`,source:`official`,last_updated:`2026-03-23`,capabilities:{streaming:!0,tool_call:!0},family:`GLM-5`,model_type:`chat`,page_url:`https://huggingface.co/zai-org/GLM-5`,description:`👋 Join our <a href="https://raw.githubusercontent.com/zai-org/GLM-5/refs/heads/main/resources/wechat.png" target="_blank">WeChat</a> or <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord</a> community.`,release_date:`2026-02-11`,context_window:202752,license:`mit`,parameters:754,tagline:`👋 Join our <a href="https://raw.githubusercontent.com/zai-org/GLM-5/refs/heads/main/resources/wechat.png" target="_blank">WeChat</a> or <a href="https://discord.gg/QR7SARHRxK" target="_blank">Discord`,architecture:`moe`,open_weight:!0,tools:[`function_calling`]},{id:`zai-org/GLM-Z1-32B-0414`,name:`GLM-Z1-32B-0414`,created_by:`zhipu`,source:`official`,last_updated:`2026-03-21`,capabilities:{streaming:!0},model_type:`chat`,family:`glm`,parameters:32}]},t=e.models.map(t=>({...t,provider:e.id}));export{t as models,e as provider};