@huggingface/tasks 0.10.5 → 0.10.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +11 -2
- package/dist/index.js +11 -2
- package/dist/src/local-apps.d.ts +2 -2
- package/dist/src/local-apps.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/local-apps.ts +13 -4
package/dist/index.cjs
CHANGED
@@ -5738,11 +5738,20 @@ function isGgufModel(model) {
   return model.tags.includes("gguf");
 }
 var snippetLlamacpp = (model) => {
-  return `./main \\
+  return [
+    `
+## Install and build llama.cpp with curl support
+git clone https://github.com/ggerganov/llama.cpp.git
+cd llama.cpp
+LLAMA_CURL=1 make
+`,
+    `## Load and run the model
+./main \\
   --hf-repo "${model.id}" \\
   -m file.gguf \\
   -p "I believe the meaning of life is" \\
-  -n 128`;
+  -n 128`
+  ];
 };
 var LOCAL_APPS = {
   "llama.cpp": {
package/dist/index.js
CHANGED
@@ -5700,11 +5700,20 @@ function isGgufModel(model) {
   return model.tags.includes("gguf");
 }
 var snippetLlamacpp = (model) => {
-  return `./main \\
+  return [
+    `
+## Install and build llama.cpp with curl support
+git clone https://github.com/ggerganov/llama.cpp.git
+cd llama.cpp
+LLAMA_CURL=1 make
+`,
+    `## Load and run the model
+./main \\
   --hf-repo "${model.id}" \\
   -m file.gguf \\
   -p "I believe the meaning of life is" \\
-  -n 128`;
+  -n 128`
+  ];
 };
 var LOCAL_APPS = {
   "llama.cpp": {
package/dist/src/local-apps.d.ts
CHANGED
@@ -34,7 +34,7 @@ export type LocalApp = {
     /**
      * And if not (mostly llama.cpp), snippet to copy/paste in your terminal
      */
-    snippet: (model: ModelData) => string;
+    snippet: (model: ModelData) => string | string[];
 });
 declare function isGgufModel(model: ModelData): boolean;
 /**
@@ -54,7 +54,7 @@ export declare const LOCAL_APPS: {
         docsUrl: string;
         mainTask: "text-generation";
         displayOnModelPage: typeof isGgufModel;
-        snippet: (model: ModelData) => string;
+        snippet: (model: ModelData) => string[];
     };
     lmstudio: {
         prettyLabel: string;
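On the generic LocalApp shape, snippet can now return either a single string or an array of strings, so downstream code that previously rendered one terminal block has to handle both cases. A minimal normalization sketch, not part of this package (the helper name is hypothetical, and it assumes ModelData is exported from the package entry point):

import type { ModelData } from "@huggingface/tasks";

// Hypothetical helper: normalize the widened return type
// (string | string[]) to an array of terminal blocks.
type SnippetFn = (model: ModelData) => string | string[];

function toSnippetBlocks(snippet: SnippetFn, model: ModelData): string[] {
	const result = snippet(model);
	return Array.isArray(result) ? result : [result];
}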
package/dist/src/local-apps.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,GAAG,CAAC;CACnC,GACD;IACA;;OAEG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,CAAC;
+{"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,GAAG,CAAC;CACnC,GACD;IACA;;OAEG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,GAAG,MAAM,EAAE,CAAC;CAChD,CACH,CAAC;AAEF,iBAAS,WAAW,CAAC,KAAK,EAAE,SAAS,WAEpC;AAmBD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;yBA5BS,SAAS,KAAG,MAAM,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAiFhB,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,MAAM,OAAO,UAAU,CAAC"}
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
 	"name": "@huggingface/tasks",
 	"packageManager": "pnpm@8.10.5",
-	"version": "0.10.5",
+	"version": "0.10.6",
 	"description": "List of ML tasks for huggingface.co/tasks",
 	"repository": "https://github.com/huggingface/huggingface.js.git",
 	"publishConfig": {
package/src/local-apps.ts
CHANGED
@@ -38,7 +38,7 @@ export type LocalApp = {
 	/**
 	 * And if not (mostly llama.cpp), snippet to copy/paste in your terminal
 	 */
-	snippet: (model: ModelData) => string;
+	snippet: (model: ModelData) => string | string[];
 	}
 );

@@ -46,12 +46,21 @@ function isGgufModel(model: ModelData) {
 	return model.tags.includes("gguf");
 }

-const snippetLlamacpp = (model: ModelData): string => {
-	return `./main \\
+const snippetLlamacpp = (model: ModelData): string[] => {
+	return [
+		`
+## Install and build llama.cpp with curl support
+git clone https://github.com/ggerganov/llama.cpp.git
+cd llama.cpp
+LLAMA_CURL=1 make
+`,
+		`## Load and run the model
+./main \\
 	--hf-repo "${model.id}" \\
 	-m file.gguf \\
 	-p "I believe the meaning of life is" \\
-	-n 128`;
+	-n 128`,
+	];
 };

 /**
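For context, a sketch of how the updated llama.cpp entry might be consumed. The model object is a hypothetical, partial ModelData stub (hence the cast), and it assumes LOCAL_APPS and ModelData are exported from the package entry point:

import { LOCAL_APPS, type ModelData } from "@huggingface/tasks";

// Sketch only: a real ModelData carries more fields than this hypothetical stub.
const model = { id: "user/model-GGUF", tags: ["gguf"] } as unknown as ModelData;

// snippet now returns string[]: an install/build block followed by a run block.
for (const block of LOCAL_APPS["llama.cpp"].snippet(model)) {
	console.log(block.trim());
}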