@huggingface/tasks 0.19.2 → 0.19.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/commonjs/gguf.d.ts +48 -4
- package/dist/commonjs/gguf.d.ts.map +1 -1
- package/dist/commonjs/gguf.js +138 -8
- package/dist/commonjs/model-libraries-snippets.d.ts +4 -1
- package/dist/commonjs/model-libraries-snippets.d.ts.map +1 -1
- package/dist/commonjs/model-libraries-snippets.js +101 -10
- package/dist/commonjs/model-libraries.d.ts +24 -1
- package/dist/commonjs/model-libraries.d.ts.map +1 -1
- package/dist/commonjs/model-libraries.js +23 -0
- package/dist/commonjs/tasks/index.js +1 -1
- package/dist/esm/gguf.d.ts +48 -4
- package/dist/esm/gguf.d.ts.map +1 -1
- package/dist/esm/gguf.js +136 -7
- package/dist/esm/model-libraries-snippets.d.ts +4 -1
- package/dist/esm/model-libraries-snippets.d.ts.map +1 -1
- package/dist/esm/model-libraries-snippets.js +95 -7
- package/dist/esm/model-libraries.d.ts +24 -1
- package/dist/esm/model-libraries.d.ts.map +1 -1
- package/dist/esm/model-libraries.js +23 -0
- package/dist/esm/tasks/index.js +1 -1
- package/package.json +1 -2
- package/src/gguf.ts +154 -9
- package/src/model-libraries-snippets.ts +103 -7
- package/src/model-libraries.ts +23 -0
- package/src/tasks/index.ts +1 -1
- package/src/tasks/reinforcement-learning/about.md +1 -1
package/dist/esm/gguf.js
CHANGED
@@ -1,3 +1,137 @@
+// This list is copied from gguf/types.ts, but will all types available (for backward compatibility)
+// NOT to be confused with GGMLQuantizationType, a FileQuantization can contain multiple GGMLQuantizationType
+// For example, Q4_K_M model can contains Q4_K and Q6_K tensors
+export var GGMLFileQuantizationType;
+(function (GGMLFileQuantizationType) {
+    GGMLFileQuantizationType[GGMLFileQuantizationType["F32"] = 0] = "F32";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["F16"] = 1] = "F16";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q4_0"] = 2] = "Q4_0";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q4_1"] = 3] = "Q4_1";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q4_1_SOME_F16"] = 4] = "Q4_1_SOME_F16";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q4_2"] = 5] = "Q4_2";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q4_3"] = 6] = "Q4_3";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q8_0"] = 7] = "Q8_0";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q5_0"] = 8] = "Q5_0";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q5_1"] = 9] = "Q5_1";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q2_K"] = 10] = "Q2_K";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q3_K_S"] = 11] = "Q3_K_S";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q3_K_M"] = 12] = "Q3_K_M";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q3_K_L"] = 13] = "Q3_K_L";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q4_K_S"] = 14] = "Q4_K_S";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q4_K_M"] = 15] = "Q4_K_M";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q5_K_S"] = 16] = "Q5_K_S";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q5_K_M"] = 17] = "Q5_K_M";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q6_K"] = 18] = "Q6_K";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["IQ2_XXS"] = 19] = "IQ2_XXS";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["IQ2_XS"] = 20] = "IQ2_XS";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q2_K_S"] = 21] = "Q2_K_S";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["IQ3_XS"] = 22] = "IQ3_XS";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["IQ3_XXS"] = 23] = "IQ3_XXS";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["IQ1_S"] = 24] = "IQ1_S";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["IQ4_NL"] = 25] = "IQ4_NL";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["IQ3_S"] = 26] = "IQ3_S";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["IQ3_M"] = 27] = "IQ3_M";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["IQ2_S"] = 28] = "IQ2_S";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["IQ2_M"] = 29] = "IQ2_M";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["IQ4_XS"] = 30] = "IQ4_XS";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["IQ1_M"] = 31] = "IQ1_M";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["BF16"] = 32] = "BF16";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q4_0_4_4"] = 33] = "Q4_0_4_4";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q4_0_4_8"] = 34] = "Q4_0_4_8";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["Q4_0_8_8"] = 35] = "Q4_0_8_8";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["TQ1_0"] = 36] = "TQ1_0";
+    GGMLFileQuantizationType[GGMLFileQuantizationType["TQ2_0"] = 37] = "TQ2_0";
+})(GGMLFileQuantizationType || (GGMLFileQuantizationType = {}));
+const ggufQuants = Object.values(GGMLFileQuantizationType).filter((v) => typeof v === "string");
+export const GGUF_QUANT_RE = new RegExp(`(?<quant>${ggufQuants.join("|")})` + "(_(?<sizeVariation>[A-Z]+))?");
+export const GGUF_QUANT_RE_GLOBAL = new RegExp(GGUF_QUANT_RE, "g");
+export function parseGGUFQuantLabel(fname) {
+    const quantLabel = fname.toUpperCase().match(GGUF_QUANT_RE_GLOBAL)?.at(-1); // if there is multiple quant substrings in a name, we prefer the last one
+    return quantLabel;
+}
+// order of quantization, from biggest to smallest
+// this list must be in sync with the order in GGMLFileQuantizationType
+// the gguf.spec.ts tests are using verify if the order is correct
+export const GGUF_QUANT_ORDER = [
+    GGMLFileQuantizationType.F32,
+    GGMLFileQuantizationType.BF16,
+    GGMLFileQuantizationType.F16,
+    GGMLFileQuantizationType.Q8_0,
+    // 6-bit quantizations
+    GGMLFileQuantizationType.Q6_K,
+    // 5-bit quantizations
+    GGMLFileQuantizationType.Q5_0,
+    GGMLFileQuantizationType.Q5_1,
+    GGMLFileQuantizationType.Q5_K_M,
+    GGMLFileQuantizationType.Q5_K_S,
+    // 4-bit quantizations
+    GGMLFileQuantizationType.Q4_K_M,
+    GGMLFileQuantizationType.Q4_K_S,
+    GGMLFileQuantizationType.IQ4_NL,
+    GGMLFileQuantizationType.IQ4_XS,
+    GGMLFileQuantizationType.Q4_0_4_4,
+    GGMLFileQuantizationType.Q4_0_4_8,
+    GGMLFileQuantizationType.Q4_0_8_8,
+    GGMLFileQuantizationType.Q4_0,
+    GGMLFileQuantizationType.Q4_1_SOME_F16,
+    GGMLFileQuantizationType.Q4_1,
+    GGMLFileQuantizationType.Q4_2,
+    GGMLFileQuantizationType.Q4_3,
+    // 3-bit quantizations
+    GGMLFileQuantizationType.Q3_K_L,
+    GGMLFileQuantizationType.Q3_K_M,
+    GGMLFileQuantizationType.Q3_K_S,
+    GGMLFileQuantizationType.IQ3_M,
+    GGMLFileQuantizationType.IQ3_S,
+    GGMLFileQuantizationType.IQ3_XS,
+    GGMLFileQuantizationType.IQ3_XXS,
+    // 2-bit quantizations
+    GGMLFileQuantizationType.Q2_K,
+    GGMLFileQuantizationType.Q2_K_S,
+    GGMLFileQuantizationType.IQ2_M,
+    GGMLFileQuantizationType.IQ2_S,
+    GGMLFileQuantizationType.IQ2_XS,
+    GGMLFileQuantizationType.IQ2_XXS,
+    // 1-bit quantizations
+    GGMLFileQuantizationType.IQ1_S,
+    GGMLFileQuantizationType.IQ1_M,
+    GGMLFileQuantizationType.TQ1_0,
+    GGMLFileQuantizationType.TQ2_0,
+];
+// This function finds the nearest quantization type that is less than or equal to the given quantization type.
+// It returns undefined if no such quantization type is found.
+export function findNearestQuantType(quant, availableQuants) {
+    // Create a map for quick index lookup from the defined order
+    const orderMap = new Map();
+    GGUF_QUANT_ORDER.forEach((q, index) => {
+        orderMap.set(q, index);
+    });
+    const targetIndex = orderMap.get(quant) ?? 0; // the 0 case should never happen
+    // Filter the available quantizations to include only those defined in the order map,
+    // then sort them according to the GGUF_QUANT_ORDER (from largest/index 0 to smallest/highest index).
+    const sortedAvailable = availableQuants
+        .filter((q) => orderMap.has(q))
+        .sort((a, b) => (orderMap.get(a) ?? Infinity) - (orderMap.get(b) ?? Infinity));
+    // If no valid quantizations are available after filtering
+    if (sortedAvailable.length === 0) {
+        return undefined;
+    }
+    // Iterate through the sorted available quantizations (largest to smallest).
+    // Find the first one whose order index is >= the target index.
+    // This means finding the largest quantization that is smaller than or equal to the target.
+    for (const availableQuant of sortedAvailable) {
+        // We know the key exists due to the filter above.
+        const availableIndex = orderMap.get(availableQuant) ?? 0;
+        if (availableIndex >= targetIndex) {
+            return availableQuant;
+        }
+    }
+    // If the loop completes, it means all available quantizations are larger (have a smaller index)
+    // than the target quantization. In this case, return the "smallest" available quantization,
+    // which is the last element in the sorted list (highest index among available).
+    return sortedAvailable[sortedAvailable.length - 1];
+}
+// This list is only used to calculate the size of the model, NOT to be confused with the quantization FILE type
 export var GGMLQuantizationType;
 (function (GGMLQuantizationType) {
     GGMLQuantizationType[GGMLQuantizationType["F32"] = 0] = "F32";
@@ -29,11 +163,6 @@ export var GGMLQuantizationType;
     GGMLQuantizationType[GGMLQuantizationType["F64"] = 28] = "F64";
     GGMLQuantizationType[GGMLQuantizationType["IQ1_M"] = 29] = "IQ1_M";
     GGMLQuantizationType[GGMLQuantizationType["BF16"] = 30] = "BF16";
+    GGMLQuantizationType[GGMLQuantizationType["TQ1_0"] = 34] = "TQ1_0";
+    GGMLQuantizationType[GGMLQuantizationType["TQ2_0"] = 35] = "TQ2_0";
 })(GGMLQuantizationType || (GGMLQuantizationType = {}));
-const ggufQuants = Object.values(GGMLQuantizationType).filter((v) => typeof v === "string");
-export const GGUF_QUANT_RE = new RegExp(`(?<quant>${ggufQuants.join("|")})` + "(_(?<sizeVariation>[A-Z]+))?");
-export const GGUF_QUANT_RE_GLOBAL = new RegExp(GGUF_QUANT_RE, "g");
-export function parseGGUFQuantLabel(fname) {
-    const quantLabel = fname.toUpperCase().match(GGUF_QUANT_RE_GLOBAL)?.at(-1); // if there is multiple quant substrings in a name, we prefer the last one
-    return quantLabel;
-}
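For orientation only (not part of the published package): a minimal sketch of how the helpers introduced above could be called. It assumes parseGGUFQuantLabel, findNearestQuantType and GGMLFileQuantizationType are re-exported from the package root, which this diff does not show.

    // Sketch, not from the diff: the named imports below are assumed to be
    // re-exported from the root of @huggingface/tasks.
    import { parseGGUFQuantLabel, findNearestQuantType, GGMLFileQuantizationType } from "@huggingface/tasks";

    // The quant regex is now built from GGMLFileQuantizationType, so it recognizes
    // file-level labels such as Q4_K_M directly in a GGUF filename.
    console.log(parseGGUFQuantLabel("llama-3-8b-Q4_K_M.gguf")); // "Q4_K_M" (hypothetical filename)

    // findNearestQuantType walks GGUF_QUANT_ORDER (largest first) and returns the first
    // available quant that is the same size or smaller than the requested one.
    const available = [GGMLFileQuantizationType.Q8_0, GGMLFileQuantizationType.Q4_K_M];
    const nearest = findNearestQuantType(GGMLFileQuantizationType.Q6_K, available);
    console.log(nearest !== undefined ? GGMLFileQuantizationType[nearest] : "none"); // "Q4_K_M"
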
package/dist/esm/model-libraries-snippets.d.ts
CHANGED
@@ -12,6 +12,7 @@ export declare const depth_anything_v2: (model: ModelData) => string[];
 export declare const depth_pro: (model: ModelData) => string[];
 export declare const derm_foundation: () => string[];
 export declare const dia: (model: ModelData) => string[];
+export declare const describe_anything: (model: ModelData) => string[];
 export declare const diffusers: (model: ModelData) => string[];
 export declare const diffusionkit: (model: ModelData) => string[];
 export declare const cartesia_pytorch: (model: ModelData) => string[];
@@ -37,6 +38,7 @@ export declare const mesh_anything: () => string[];
 export declare const open_clip: (model: ModelData) => string[];
 export declare const paddlenlp: (model: ModelData) => string[];
 export declare const perception_encoder: (model: ModelData) => string[];
+export declare const phantom_wan: (model: ModelData) => string[];
 export declare const pyannote_audio_pipeline: (model: ModelData) => string[];
 export declare const pyannote_audio: (model: ModelData) => string[];
 export declare const relik: (model: ModelData) => string[];
@@ -70,8 +72,8 @@ export declare const chattts: () => string[];
 export declare const ultralytics: (model: ModelData) => string[];
 export declare const birefnet: (model: ModelData) => string[];
 export declare const swarmformer: (model: ModelData) => string[];
-export declare const mlx: (model: ModelData) => string[];
 export declare const mlxim: (model: ModelData) => string[];
+export declare const mlx: (model: ModelData) => string[];
 export declare const model2vec: (model: ModelData) => string[];
 export declare const nemo: (model: ModelData) => string[];
 export declare const outetts: (model: ModelData) => string[];
@@ -82,4 +84,5 @@ export declare const audiocraft: (model: ModelData) => string[];
 export declare const whisperkit: () => string[];
 export declare const threedtopia_xl: (model: ModelData) => string[];
 export declare const hezar: (model: ModelData) => string[];
+export declare const zonos: (model: ModelData) => string[];
 //# sourceMappingURL=model-libraries-snippets.d.ts.map
package/dist/esm/model-libraries-snippets.d.ts.map
CHANGED
@@ -1 +1 @@
(single-line source-map "mappings" string regenerated; blob omitted)
package/dist/esm/model-libraries-snippets.js
CHANGED
@@ -208,6 +208,19 @@ output = model.generate(text)
 
 sf.write("simple.mp3", output, 44100)`,
 ];
+export const describe_anything = (model) => [
+    `# pip install git+https://github.com/NVlabs/describe-anything
+from huggingface_hub import snapshot_download
+from dam import DescribeAnythingModel
+
+snapshot_download(${model.id}, local_dir="checkpoints")
+
+dam = DescribeAnythingModel(
+    model_path="checkpoints",
+    conv_mode="v1",
+    prompt_mode="focal_prompt",
+)`,
+];
 const diffusersDefaultPrompt = "Astronaut in a jungle, cold color palette, muted colors, detailed, 8k";
 const diffusers_default = (model) => [
     `from diffusers import DiffusionPipeline
@@ -658,6 +671,17 @@ model = pe.VisionTransformer.from_config("${model.id}", pretrained=True)`;
         return [vision_encoder];
     }
 };
+export const phantom_wan = (model) => [
+    `from huggingface_hub import snapshot_download
+from phantom_wan import WANI2V, configs
+
+checkpoint_dir = snapshot_download("${model.id}")
+wan_i2v = WanI2V(
+    config=configs.WAN_CONFIGS['i2v-14B'],
+    checkpoint_dir=checkpoint_dir,
+)
+video = wan_i2v.generate(text_prompt, image_prompt)`,
+];
 export const pyannote_audio_pipeline = (model) => [
     `from pyannote.audio import Pipeline
 
@@ -1015,12 +1039,27 @@ export const transformers = (model) => {
         ].join("\n");
     }
     if (model.pipeline_tag && LIBRARY_TASK_MAPPING.transformers?.includes(model.pipeline_tag)) {
-        const pipelineSnippet = [
-
-
-
-
-
+        const pipelineSnippet = [
+            "# Use a pipeline as a high-level helper",
+            "from transformers import pipeline",
+            "",
+            `pipe = pipeline("${model.pipeline_tag}", model="${model.id}"` + remote_code_snippet + ")",
+        ];
+        if (model.tags.includes("conversational")) {
+            if (model.tags.includes("image-text-to-text")) {
+                pipelineSnippet.push("messages = [", [
+                    " {",
+                    ' "role": "user",',
+                    ' "content": [',
+                    ' {"type": "image", "url": "https://huggingface.co/datasets/huggingface/documentation-images/resolve/main/p-blog/candy.JPG"},',
+                    ' {"type": "text", "text": "What animal is on the candy?"}',
+                    " ]",
+                    " },",
+                ].join("\n"), "]");
+            }
+            else {
+                pipelineSnippet.push("messages = [", ' {"role": "user", "content": "Who are you?"},', "]");
+            }
             pipelineSnippet.push("pipe(messages)");
         }
         return [pipelineSnippet.join("\n"), autoSnippet];
@@ -1184,17 +1223,48 @@ export const swarmformer = (model) => [
 model = SwarmFormerModel.from_pretrained("${model.id}")
 `,
 ];
-
+const mlx_unknown = (model) => [
     `pip install huggingface_hub hf_transfer
 
 export HF_HUB_ENABLE_HF_TRANSFER=1
 huggingface-cli download --local-dir ${nameWithoutNamespace(model.id)} ${model.id}`,
 ];
+const mlxlm = (model) => [
+    `pip install --upgrade mlx-lm
+
+mlx_lm.generate --model ${model.id} --prompt "Hello"`,
+];
+const mlxchat = (model) => [
+    `pip install --upgrade mlx-lm
+
+mlx_lm.chat --model ${model.id}`,
+];
+const mlxvlm = (model) => [
+    `pip install --upgrade mlx-vlm
+
+mlx_vlm.generate --model ${model.id} \\
+--prompt "Describe this image." \\
+--image "https://huggingface.co/datasets/huggingface/documentation-images/resolve/0052a70beed5bf71b92610a43a52df6d286cd5f3/diffusers/rabbit.jpg"`,
+];
 export const mlxim = (model) => [
     `from mlxim.model import create_model
 
 model = create_model(${model.id})`,
 ];
+export const mlx = (model) => {
+    if (model.tags.includes("image-text-to-text")) {
+        return mlxvlm(model);
+    }
+    if (model.tags.includes("conversational")) {
+        if (model.config?.tokenizer_config?.chat_template) {
+            return mlxchat(model);
+        }
+        else {
+            return mlxlm(model);
+        }
+    }
+    return mlx_unknown(model);
+};
 export const model2vec = (model) => [
     `from model2vec import StaticModel
 
@@ -1312,4 +1382,22 @@ export const hezar = (model) => [
 
 model = Model.load("${model.id}")`,
 ];
+export const zonos = (model) => [
+    `# pip install git+https://github.com/Zyphra/Zonos.git
+import torchaudio
+from zonos.model import Zonos
+from zonos.conditioning import make_cond_dict
+
+model = Zonos.from_pretrained("${model.id}", device="cuda")
+
+wav, sr = torchaudio.load("speaker.wav") # 5-10s reference clip
+speaker = model.make_speaker_embedding(wav, sr)
+
+cond = make_cond_dict(text="Hello, world!", speaker=speaker, language="en-us")
+codes = model.generate(model.prepare_conditioning(cond))
+
+audio = model.autoencoder.decode(codes)[0].cpu()
+torchaudio.save("sample.wav", audio, model.autoencoder.sampling_rate)
+`,
+];
 //#endregion
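To make the new mlx dispatch above concrete, a small illustrative sketch (not part of the package): the only ModelData fields it relies on (id, tags, config.tokenizer_config.chat_template) are the ones the dispatcher itself reads, and the import assumes MODEL_LIBRARIES_UI_ELEMENTS and ModelData are exported from the package root.

    // Sketch: route a conversational MLX model through the registry's snippet builder.
    import { MODEL_LIBRARIES_UI_ELEMENTS, type ModelData } from "@huggingface/tasks";

    const model = {
        id: "mlx-community/SomeModel-4bit", // hypothetical repo id
        tags: ["mlx", "conversational"],
        config: { tokenizer_config: { chat_template: "..." } },
    } as unknown as ModelData;

    // With a chat template present, mlx() returns the `mlx_lm.chat` command;
    // an "image-text-to-text" tag would route to mlx-vlm, and anything else falls
    // back to the plain `huggingface-cli download` snippet.
    console.log(MODEL_LIBRARIES_UI_ELEMENTS.mlx.snippets?.(model).join("\n\n"));
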
package/dist/esm/model-libraries.d.ts
CHANGED
@@ -226,6 +226,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         filter: false;
         countDownloads: string;
     };
+    "describe-anything": {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        snippets: (model: ModelData) => string[];
+        filter: false;
+    };
     "dia-tts": {
         prettyLabel: string;
         repoName: string;
@@ -653,6 +660,14 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         snippets: (model: ModelData) => string[];
         countDownloads: string;
     };
+    "phantom-wan": {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        snippets: (model: ModelData) => string[];
+        filter: false;
+        countDownloads: string;
+    };
     pxia: {
         prettyLabel: string;
         repoName: string;
@@ -999,6 +1014,14 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         countDownloads: string;
         snippets: (model: ModelData) => string[];
     };
+    zonos: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        docsUrl: string;
+        snippets: (model: ModelData) => string[];
+        filter: false;
+    };
     "3dtopia-xl": {
         prettyLabel: string;
         repoName: string;
@@ -1010,5 +1033,5 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 };
 export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
 export declare const ALL_MODEL_LIBRARY_KEYS: ModelLibraryKey[];
-export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "anemoi" | "araclip" | "asteroid" | "audiocraft" | "audioseal" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "comet" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "dia-tts" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "geometry-crafter" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hezar" | "htrflow" | "hunyuan-dit" | "hunyuan3d-2" | "imstoucan" | "index-tts" | "infinite-you" | "keras" | "tf-keras" | "keras-hub" | "k2" | "lightning-ir" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "mamba-ssm" | "mars5-tts" | "matanyone" | "mesh-anything" | "merlin" | "medvae" | "mitie" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open-oasis" | "open_clip" | "open-sora" | "outetts" | "paddlenlp" | "peft" | "perception-encoder" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "swarmformer" | "f5-tts" | "genmo" | "tensorflowtts" | "tabpfn" | "terratorch" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "trellis" | "ultralytics" | "uni-3dar" | "unity-sentis" | "sana" | "vfi-mamba" | "voicecraft" | "wham" | "whisperkit" | "yolov10" | "3dtopia-xl")[];
+export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "anemoi" | "araclip" | "asteroid" | "audiocraft" | "audioseal" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "comet" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "describe-anything" | "dia-tts" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "geometry-crafter" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hezar" | "htrflow" | "hunyuan-dit" | "hunyuan3d-2" | "imstoucan" | "index-tts" | "infinite-you" | "keras" | "tf-keras" | "keras-hub" | "k2" | "lightning-ir" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "mamba-ssm" | "mars5-tts" | "matanyone" | "mesh-anything" | "merlin" | "medvae" | "mitie" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open-oasis" | "open_clip" | "open-sora" | "outetts" | "paddlenlp" | "peft" | "perception-encoder" | "phantom-wan" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "swarmformer" | "f5-tts" | "genmo" | "tensorflowtts" | "tabpfn" | "terratorch" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "trellis" | "ultralytics" | "uni-3dar" | "unity-sentis" | "sana" | "vfi-mamba" | "voicecraft" | "wham" | "whisperkit" | "yolov10" | "zonos" | "3dtopia-xl")[];
 //# sourceMappingURL=model-libraries.d.ts.map
package/dist/esm/model-libraries.d.ts.map
CHANGED
@@ -1 +1 @@
(single-line source-map "mappings" string regenerated; blob omitted)
package/dist/esm/model-libraries.js
CHANGED
@@ -186,6 +186,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
         filter: false,
         countDownloads: `path:"scin_dataset_precomputed_embeddings.npz" OR path:"saved_model.pb"`,
     },
+    "describe-anything": {
+        prettyLabel: "Describe Anything",
+        repoName: "Describe Anything",
+        repoUrl: "https://github.com/NVlabs/describe-anything",
+        snippets: snippets.describe_anything,
+        filter: false,
+    },
     "dia-tts": {
         prettyLabel: "Dia",
         repoName: "Dia",
@@ -618,6 +625,14 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
         snippets: snippets.perception_encoder,
         countDownloads: `path_extension:"pt"`,
     },
+    "phantom-wan": {
+        prettyLabel: "Phantom",
+        repoName: "Phantom",
+        repoUrl: "https://github.com/Phantom-video/Phantom",
+        snippets: snippets.phantom_wan,
+        filter: false,
+        countDownloads: `path_extension:"pth"`,
+    },
     pxia: {
         prettyLabel: "pxia",
         repoName: "pxia",
@@ -965,6 +980,14 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
         countDownloads: `path_extension:"pt" OR path_extension:"safetensors"`,
         snippets: snippets.ultralytics,
     },
+    zonos: {
+        prettyLabel: "Zonos",
+        repoName: "Zonos",
+        repoUrl: "https://github.com/Zyphra/Zonos",
+        docsUrl: "https://github.com/Zyphra/Zonos",
+        snippets: snippets.zonos,
+        filter: false,
+    },
     "3dtopia-xl": {
         prettyLabel: "3DTopia-XL",
         repoName: "3DTopia-XL",
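For reference, an illustrative sketch (not from the diff) of how the three new registry entries can be read back; it assumes MODEL_LIBRARIES_UI_ELEMENTS is exported from the package root.

    // Sketch: the labels and repo URLs registered above for the new libraries.
    import { MODEL_LIBRARIES_UI_ELEMENTS } from "@huggingface/tasks";

    for (const key of ["describe-anything", "phantom-wan", "zonos"] as const) {
        const entry = MODEL_LIBRARIES_UI_ELEMENTS[key];
        // e.g. "Describe Anything -> https://github.com/NVlabs/describe-anything"
        console.log(`${entry.prettyLabel} -> ${entry.repoUrl}`);
    }
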
package/dist/esm/tasks/index.js
CHANGED
@@ -113,7 +113,7 @@ export const TASKS_MODEL_LIBRARIES = {
 /**
  * Return the whole TaskData object for a certain task.
  * If the partialTaskData argument is left undefined,
- * the default
+ * the default placeholder data will be used.
  */
 function getData(type, partialTaskData = placeholder) {
     return {
package/package.json
CHANGED
@@ -1,7 +1,6 @@
 {
   "name": "@huggingface/tasks",
-  "
-  "version": "0.19.2",
+  "version": "0.19.4",
   "description": "List of ML tasks for huggingface.co/tasks",
   "repository": "https://github.com/huggingface/huggingface.js.git",
   "publishConfig": {
package/src/gguf.ts
CHANGED
@@ -1,3 +1,155 @@
+// This list is copied from gguf/types.ts, but will all types available (for backward compatibility)
+// NOT to be confused with GGMLQuantizationType, a FileQuantization can contain multiple GGMLQuantizationType
+// For example, Q4_K_M model can contains Q4_K and Q6_K tensors
+export enum GGMLFileQuantizationType {
+	F32 = 0,
+	F16 = 1,
+	Q4_0 = 2,
+	Q4_1 = 3,
+	Q4_1_SOME_F16 = 4,
+	Q4_2 = 5,
+	Q4_3 = 6,
+	Q8_0 = 7,
+	Q5_0 = 8,
+	Q5_1 = 9,
+	Q2_K = 10,
+	Q3_K_S = 11,
+	Q3_K_M = 12,
+	Q3_K_L = 13,
+	Q4_K_S = 14,
+	Q4_K_M = 15,
+	Q5_K_S = 16,
+	Q5_K_M = 17,
+	Q6_K = 18,
+	IQ2_XXS = 19,
+	IQ2_XS = 20,
+	Q2_K_S = 21,
+	IQ3_XS = 22,
+	IQ3_XXS = 23,
+	IQ1_S = 24,
+	IQ4_NL = 25,
+	IQ3_S = 26,
+	IQ3_M = 27,
+	IQ2_S = 28,
+	IQ2_M = 29,
+	IQ4_XS = 30,
+	IQ1_M = 31,
+	BF16 = 32,
+	Q4_0_4_4 = 33,
+	Q4_0_4_8 = 34,
+	Q4_0_8_8 = 35,
+	TQ1_0 = 36,
+	TQ2_0 = 37,
+}
+
+const ggufQuants = Object.values(GGMLFileQuantizationType).filter((v): v is string => typeof v === "string");
+export const GGUF_QUANT_RE = new RegExp(`(?<quant>${ggufQuants.join("|")})` + "(_(?<sizeVariation>[A-Z]+))?");
+export const GGUF_QUANT_RE_GLOBAL = new RegExp(GGUF_QUANT_RE, "g");
+
+export function parseGGUFQuantLabel(fname: string): string | undefined {
+	const quantLabel = fname.toUpperCase().match(GGUF_QUANT_RE_GLOBAL)?.at(-1); // if there is multiple quant substrings in a name, we prefer the last one
+	return quantLabel;
+}
+
+// order of quantization, from biggest to smallest
+// this list must be in sync with the order in GGMLFileQuantizationType
+// the gguf.spec.ts tests are using verify if the order is correct
+export const GGUF_QUANT_ORDER: GGMLFileQuantizationType[] = [
+	GGMLFileQuantizationType.F32,
+	GGMLFileQuantizationType.BF16,
+	GGMLFileQuantizationType.F16,
+	GGMLFileQuantizationType.Q8_0,
+
+	// 6-bit quantizations
+	GGMLFileQuantizationType.Q6_K,
+
+	// 5-bit quantizations
+	GGMLFileQuantizationType.Q5_0,
+	GGMLFileQuantizationType.Q5_1,
+	GGMLFileQuantizationType.Q5_K_M,
+	GGMLFileQuantizationType.Q5_K_S,
+
+	// 4-bit quantizations
+	GGMLFileQuantizationType.Q4_K_M,
+	GGMLFileQuantizationType.Q4_K_S,
+	GGMLFileQuantizationType.IQ4_NL,
+	GGMLFileQuantizationType.IQ4_XS,
+	GGMLFileQuantizationType.Q4_0_4_4,
+	GGMLFileQuantizationType.Q4_0_4_8,
+	GGMLFileQuantizationType.Q4_0_8_8,
+	GGMLFileQuantizationType.Q4_0,
+	GGMLFileQuantizationType.Q4_1_SOME_F16,
+	GGMLFileQuantizationType.Q4_1,
+	GGMLFileQuantizationType.Q4_2,
+	GGMLFileQuantizationType.Q4_3,
+
+	// 3-bit quantizations
+	GGMLFileQuantizationType.Q3_K_L,
+	GGMLFileQuantizationType.Q3_K_M,
+	GGMLFileQuantizationType.Q3_K_S,
+	GGMLFileQuantizationType.IQ3_M,
+	GGMLFileQuantizationType.IQ3_S,
+	GGMLFileQuantizationType.IQ3_XS,
+	GGMLFileQuantizationType.IQ3_XXS,
+
+	// 2-bit quantizations
+	GGMLFileQuantizationType.Q2_K,
+	GGMLFileQuantizationType.Q2_K_S,
+	GGMLFileQuantizationType.IQ2_M,
+	GGMLFileQuantizationType.IQ2_S,
+	GGMLFileQuantizationType.IQ2_XS,
+	GGMLFileQuantizationType.IQ2_XXS,
+
+	// 1-bit quantizations
+	GGMLFileQuantizationType.IQ1_S,
+	GGMLFileQuantizationType.IQ1_M,
+	GGMLFileQuantizationType.TQ1_0,
+	GGMLFileQuantizationType.TQ2_0,
+];
+
+// This function finds the nearest quantization type that is less than or equal to the given quantization type.
+// It returns undefined if no such quantization type is found.
+export function findNearestQuantType(
+	quant: GGMLFileQuantizationType,
+	availableQuants: GGMLFileQuantizationType[]
+): GGMLFileQuantizationType | undefined {
+	// Create a map for quick index lookup from the defined order
+	const orderMap = new Map<GGMLFileQuantizationType, number>();
+	GGUF_QUANT_ORDER.forEach((q, index) => {
+		orderMap.set(q, index);
+	});
+
+	const targetIndex = orderMap.get(quant) ?? 0; // the 0 case should never happen
+
+	// Filter the available quantizations to include only those defined in the order map,
+	// then sort them according to the GGUF_QUANT_ORDER (from largest/index 0 to smallest/highest index).
+	const sortedAvailable = availableQuants
+		.filter((q) => orderMap.has(q))
+		.sort((a, b) => (orderMap.get(a) ?? Infinity) - (orderMap.get(b) ?? Infinity));
+
+	// If no valid quantizations are available after filtering
+	if (sortedAvailable.length === 0) {
+		return undefined;
+	}
+
+	// Iterate through the sorted available quantizations (largest to smallest).
+	// Find the first one whose order index is >= the target index.
+	// This means finding the largest quantization that is smaller than or equal to the target.
+	for (const availableQuant of sortedAvailable) {
+		// We know the key exists due to the filter above.
+		const availableIndex = orderMap.get(availableQuant) ?? 0;
+		if (availableIndex >= targetIndex) {
+			return availableQuant;
+		}
+	}
+
+	// If the loop completes, it means all available quantizations are larger (have a smaller index)
+	// than the target quantization. In this case, return the "smallest" available quantization,
+	// which is the last element in the sorted list (highest index among available).
+	return sortedAvailable[sortedAvailable.length - 1];
+}
+
+// This list is only used to calculate the size of the model, NOT to be confused with the quantization FILE type
 export enum GGMLQuantizationType {
 	F32 = 0,
 	F16 = 1,
@@ -28,13 +180,6 @@ export enum GGMLQuantizationType {
 	F64 = 28,
 	IQ1_M = 29,
 	BF16 = 30,
-
-
-const ggufQuants = Object.values(GGMLQuantizationType).filter((v): v is string => typeof v === "string");
-export const GGUF_QUANT_RE = new RegExp(`(?<quant>${ggufQuants.join("|")})` + "(_(?<sizeVariation>[A-Z]+))?");
-export const GGUF_QUANT_RE_GLOBAL = new RegExp(GGUF_QUANT_RE, "g");
-
-export function parseGGUFQuantLabel(fname: string): string | undefined {
-	const quantLabel = fname.toUpperCase().match(GGUF_QUANT_RE_GLOBAL)?.at(-1); // if there is multiple quant substrings in a name, we prefer the last one
-	return quantLabel;
+	TQ1_0 = 34,
+	TQ2_0 = 35,
 }
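One behaviour of findNearestQuantType worth spelling out (sketch only, same re-export assumption as above): when every available quantization is larger than the requested one, the function returns the smallest available quant rather than undefined.

    // Sketch: fallback branch of findNearestQuantType.
    import { findNearestQuantType, GGMLFileQuantizationType } from "@huggingface/tasks"; // assumed re-export

    // Only "large" quants on offer, but a 2-bit quant is requested:
    const onlyLarge = [GGMLFileQuantizationType.F16, GGMLFileQuantizationType.Q8_0];
    const picked = findNearestQuantType(GGMLFileQuantizationType.IQ2_M, onlyLarge);
    // Both candidates sit above IQ2_M in GGUF_QUANT_ORDER, so the last (smallest) one wins.
    console.log(picked !== undefined ? GGMLFileQuantizationType[picked] : "none"); // "Q8_0"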