@huggingface/tasks 0.10.9 → 0.10.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -4426,13 +4426,15 @@ var transformers = (model) => {
  ].join("\n");
  }
  if (model.pipeline_tag && LIBRARY_TASK_MAPPING.transformers?.includes(model.pipeline_tag)) {
- const pipelineSnippet = [
- "# Use a pipeline as a high-level helper",
- "from transformers import pipeline",
- "",
- `pipe = pipeline("${model.pipeline_tag}", model="${model.id}"` + remote_code_snippet + ")"
- ].join("\n");
- return [pipelineSnippet, autoSnippet];
+ const pipelineSnippet = ["# Use a pipeline as a high-level helper", "from transformers import pipeline", ""];
+ if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
+ pipelineSnippet.push("messages = [", ' {"role": "user", "content": "Who are you?"},', "]");
+ }
+ pipelineSnippet.push(`pipe = pipeline("${model.pipeline_tag}", model="${model.id}"` + remote_code_snippet + ")");
+ if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
+ pipelineSnippet.push("pipe(messages)");
+ }
+ return [pipelineSnippet.join("\n"), autoSnippet];
  }
  return [autoSnippet];
  };
@@ -5614,6 +5616,10 @@ var SKUS = {
  tflops: 32.62,
  memory: [24]
  },
+ "GTX 1660": {
+ tflops: 10.05,
+ memory: [6]
+ },
  "GTX 1650 Mobile": {
  tflops: 6.39,
  memory: [4]
@@ -5856,13 +5862,21 @@ function isGgufModel(model) {
  }
  var snippetLlamacpp = (model) => {
  return [
- `
- ## Install and build llama.cpp with curl support
+ `# Option 1: use llama.cpp with brew
+ brew install llama.cpp
+
+ # Load and run the model
+ llama \\
+ --hf-repo "${model.id}" \\
+ --hf-file file.gguf \\
+ -p "I believe the meaning of life is" \\
+ -n 128`,
+ `# Option 2: build llama.cpp from source with curl support
  git clone https://github.com/ggerganov/llama.cpp.git
  cd llama.cpp
  LLAMA_CURL=1 make
- `,
- `## Load and run the model
+
+ # Load and run the model
  ./main \\
  --hf-repo "${model.id}" \\
  -m file.gguf \\
package/dist/index.js CHANGED
@@ -4388,13 +4388,15 @@ var transformers = (model) => {
  ].join("\n");
  }
  if (model.pipeline_tag && LIBRARY_TASK_MAPPING.transformers?.includes(model.pipeline_tag)) {
- const pipelineSnippet = [
- "# Use a pipeline as a high-level helper",
- "from transformers import pipeline",
- "",
- `pipe = pipeline("${model.pipeline_tag}", model="${model.id}"` + remote_code_snippet + ")"
- ].join("\n");
- return [pipelineSnippet, autoSnippet];
+ const pipelineSnippet = ["# Use a pipeline as a high-level helper", "from transformers import pipeline", ""];
+ if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
+ pipelineSnippet.push("messages = [", ' {"role": "user", "content": "Who are you?"},', "]");
+ }
+ pipelineSnippet.push(`pipe = pipeline("${model.pipeline_tag}", model="${model.id}"` + remote_code_snippet + ")");
+ if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
+ pipelineSnippet.push("pipe(messages)");
+ }
+ return [pipelineSnippet.join("\n"), autoSnippet];
  }
  return [autoSnippet];
  };
@@ -5576,6 +5578,10 @@ var SKUS = {
  tflops: 32.62,
  memory: [24]
  },
+ "GTX 1660": {
+ tflops: 10.05,
+ memory: [6]
+ },
  "GTX 1650 Mobile": {
  tflops: 6.39,
  memory: [4]
@@ -5818,13 +5824,21 @@ function isGgufModel(model) {
  }
  var snippetLlamacpp = (model) => {
  return [
- `
- ## Install and build llama.cpp with curl support
+ `# Option 1: use llama.cpp with brew
+ brew install llama.cpp
+
+ # Load and run the model
+ llama \\
+ --hf-repo "${model.id}" \\
+ --hf-file file.gguf \\
+ -p "I believe the meaning of life is" \\
+ -n 128`,
+ `# Option 2: build llama.cpp from source with curl support
  git clone https://github.com/ggerganov/llama.cpp.git
  cd llama.cpp
  LLAMA_CURL=1 make
- `,
- `## Load and run the model
+
+ # Load and run the model
  ./main \\
  --hf-repo "${model.id}" \\
  -m file.gguf \\
@@ -180,6 +180,10 @@ export declare const SKUS: {
  tflops: number;
  memory: number[];
  };
+ "GTX 1660": {
+ tflops: number;
+ memory: number[];
+ };
  "GTX 1650 Mobile": {
  tflops: number;
  memory: number[];
@@ -1 +1 @@
- {"version":3,"file":"hardware.d.ts","sourceRoot":"","sources":["../../src/hardware.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,eAAO,MAAM,iDAAiD,QAAW,CAAC;AAC1E,eAAO,MAAM,yDAAyD,QAAW,CAAC;AAClF,eAAO,MAAM,oCAAoC,QAAU,CAAC;AAE5D;;;GAGG;AACH,eAAO,MAAM,+CAA+C,QAAW,CAAC;AAExE,MAAM,WAAW,YAAY;IAC5B;;;;;;;;;OASG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;CAClB;AAED,eAAO,MAAM,sBAAsB,UAAqD,CAAC;AAEzF,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA+XuD,CAAC;AAEzE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,IAAI,CAAC"}
+ {"version":3,"file":"hardware.d.ts","sourceRoot":"","sources":["../../src/hardware.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,eAAO,MAAM,iDAAiD,QAAW,CAAC;AAC1E,eAAO,MAAM,yDAAyD,QAAW,CAAC;AAClF,eAAO,MAAM,oCAAoC,QAAU,CAAC;AAE5D;;;GAGG;AACH,eAAO,MAAM,+CAA+C,QAAW,CAAC;AAExE,MAAM,WAAW,YAAY;IAC5B;;;;;;;;;OASG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;CAClB;AAED,eAAO,MAAM,sBAAsB,UAAqD,CAAC;AAEzF,eAAO,MAAM,IAAI;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAmYuD,CAAC;AAEzE,MAAM,MAAM,OAAO,GAAG,MAAM,OAAO,IAAI,CAAC"}
@@ -1 +1 @@
- {"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,GAAG,CAAC;CACnC,GACD;IACA;;OAEG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,GAAG,MAAM,EAAE,CAAC;CAChD,CACH,CAAC;AAEF,iBAAS,WAAW,CAAC,KAAK,EAAE,SAAS,WAEpC;AAmBD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;yBA5BS,SAAS,KAAG,MAAM,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAwFhB,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,MAAM,OAAO,UAAU,CAAC"}
+ {"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAEhD;;GAEG;AACH,MAAM,MAAM,QAAQ,GAAG;IACtB;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,QAAQ,EAAE,YAAY,CAAC;IACvB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IAEpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,kBAAkB,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,OAAO,CAAC;CAClD,GAAG,CACD;IACA;;OAEG;IACH,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,GAAG,CAAC;CACnC,GACD;IACA;;OAEG;IACH,OAAO,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,GAAG,MAAM,EAAE,CAAC;CAChD,CACH,CAAC;AAEF,iBAAS,WAAW,CAAC,KAAK,EAAE,SAAS,WAEpC;AA2BD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;yBApCS,SAAS,KAAG,MAAM,EAAE;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgGhB,CAAC;AAErC,MAAM,MAAM,WAAW,GAAG,MAAM,OAAO,UAAU,CAAC"}
@@ -1 +1 @@
- {"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAY9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAMF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AA+BF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAK9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AAEF,eAAO,MAAM,oBAAoB,UAAW,SAAS,KAAG,MAAM,EAI7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EAwCrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC"}
+ {"version":3,"file":"model-libraries-snippets.d.ts","sourceRoot":"","sources":["../../src/model-libraries-snippets.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAY9C,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAkBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AAMF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAIjD,CAAC;AA+BF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAUlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAMlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAIF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAO/C,CAAC;AAEF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAMhD,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAK9C,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EASlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAKlD,CAAC;AAEF,eAAO,MAAM,SAAS,UAAW,SAAS,KAAG,MAAM,EAsBlD,CAAC;AAEF,eAAO,MAAM,uBAAuB,UAAW,SAAS,KAAG,MAAM,EAehE,CAAC;AAiBF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAKvD,CAAC;AAyBF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAOtD,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAI7C,CAAC;AAsCF,eAAO,MAAM,OAAO,UAAW,SAAS,KAAG,MAAM,EAehD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,aAAa,UAAW,SAAS,KAAG,MAAM,EAEtD,CAAC;AAEF,eAAO,MAAM,oBAAoB,UAAW,SAAS,KAAG,MAAM,EAI7D,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAU9C,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAIpD,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAK/C,CAAC;AAkBF,eAAO,MAAM,WAAW,UAAW,SAAS,KAAG,MAAM,EAkBpD,CAAC;AAEF,eAAO,MAAM,YAAY,UAAW,SAAS,KAAG,MAAM,EA4CrD,CAAC;AAEF,eAAO,MAAM,cAAc,UAAW,SAAS,KAAG,MAAM,EAcvD,CAAC;AAiBF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAkB7C,CAAC;AAEF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAKjD,CAAC;AAEF,eAAO,MAAM,gBAAgB,UAAW,SAAS,KAAG,MAAM,EAMzD,CAAC;AAgBF,eAAO,MAAM,QAAQ,UAAW,SAAS,KAAG,MAAM,EAEjD,CAAC;AAEF,eAAO,MAAM,MAAM,QAA6B,MAAM,EAMrD,CAAC;AAEF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAInD,CAAC;AAEF,eAAO,MAAM,GAAG,UAAW,SAAS,KAAG,MAAM,EAK5C,CAAC;AAEF,eAAO,MAAM,KAAK,UAAW,SAAS,KAAG,MAAM,EAI9C,CAAC;AAEF,eAAO,MAAM,IAAI,UAAW,SAAS,KAAG,MAAM,EAQ7C,CAAC;AAEF,eAAO,MAAM,MAAM,UAAW,SAAS,KAAG,MAAM,EAI/C,CAAC;AA6BF,eAAO,MAAM,UAAU,UAAW,SAAS,KAAG,MAAM,EAUnD,CAAC"}
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@huggingface/tasks",
  "packageManager": "pnpm@8.10.5",
- "version": "0.10.9",
+ "version": "0.10.11",
  "description": "List of ML tasks for huggingface.co/tasks",
  "repository": "https://github.com/huggingface/huggingface.js.git",
  "publishConfig": {
package/src/hardware.ts CHANGED
@@ -184,6 +184,10 @@ export const SKUS = {
  tflops: 32.62,
  memory: [24],
  },
+ "GTX 1660": {
+ tflops: 10.05,
+ memory: [6],
+ },
  "GTX 1650 Mobile": {
  tflops: 6.39,
  memory: [4],
package/src/local-apps.ts CHANGED
@@ -48,13 +48,21 @@ function isGgufModel(model: ModelData) {

  const snippetLlamacpp = (model: ModelData): string[] => {
  return [
- `
- ## Install and build llama.cpp with curl support
+ `# Option 1: use llama.cpp with brew
+ brew install llama.cpp
+
+ # Load and run the model
+ llama \\
+ --hf-repo "${model.id}" \\
+ --hf-file file.gguf \\
+ -p "I believe the meaning of life is" \\
+ -n 128`,
+ `# Option 2: build llama.cpp from source with curl support
  git clone https://github.com/ggerganov/llama.cpp.git
  cd llama.cpp
  LLAMA_CURL=1 make
- `,
- `## Load and run the model
+
+ # Load and run the model
  ./main \\
  --hf-repo "${model.id}" \\
  -m file.gguf \\
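For reference, here is roughly what the first of the two returned strings renders to once the template literal is filled in. A minimal TypeScript sketch: the model id `TheBloke/Example-GGUF` is made up, and only the lines visible in this hunk are reproduced.

```ts
// Hypothetical GGUF model id, stood in for ModelData for illustration.
const model = { id: "TheBloke/Example-GGUF" };

// First snippet emitted by the updated snippetLlamacpp (the brew option).
const brewSnippet = `# Option 1: use llama.cpp with brew
brew install llama.cpp

# Load and run the model
llama \\
--hf-repo "${model.id}" \\
--hf-file file.gguf \\
-p "I believe the meaning of life is" \\
-n 128`;

console.log(brewSnippet);
```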
package/src/model-libraries-snippets.ts CHANGED
@@ -440,13 +440,17 @@ export const transformers = (model: ModelData): string[] => {
  }

  if (model.pipeline_tag && LIBRARY_TASK_MAPPING.transformers?.includes(model.pipeline_tag)) {
- const pipelineSnippet = [
- "# Use a pipeline as a high-level helper",
- "from transformers import pipeline",
- "",
- `pipe = pipeline("${model.pipeline_tag}", model="${model.id}"` + remote_code_snippet + ")",
- ].join("\n");
- return [pipelineSnippet, autoSnippet];
+ const pipelineSnippet = ["# Use a pipeline as a high-level helper", "from transformers import pipeline", ""];
+
+ if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
+ pipelineSnippet.push("messages = [", ' {"role": "user", "content": "Who are you?"},', "]");
+ }
+ pipelineSnippet.push(`pipe = pipeline("${model.pipeline_tag}", model="${model.id}"` + remote_code_snippet + ")");
+ if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
+ pipelineSnippet.push("pipe(messages)");
+ }
+
+ return [pipelineSnippet.join("\n"), autoSnippet];
  }
  return [autoSnippet];
  };
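The new branch only changes the generated Python snippet for models tagged `conversational` that ship a chat template. A minimal TypeScript sketch of the added logic applied to a hypothetical model: `my-org/chat-model`, its tags, and its config are invented for illustration, `remote_code_snippet` is left empty here (the surrounding function sets it for remote-code models, which this hunk does not show), and the message indentation is approximate.

```ts
// Hypothetical conversational model, shaped like the fields this hunk reads.
const model = {
	id: "my-org/chat-model",
	pipeline_tag: "text-generation",
	tags: ["conversational"],
	config: { tokenizer_config: { chat_template: "..." } },
};
const remote_code_snippet = ""; // non-empty only for remote-code models

// Same construction as the added lines above.
const pipelineSnippet = ["# Use a pipeline as a high-level helper", "from transformers import pipeline", ""];
if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
	pipelineSnippet.push("messages = [", '    {"role": "user", "content": "Who are you?"},', "]");
}
pipelineSnippet.push(`pipe = pipeline("${model.pipeline_tag}", model="${model.id}"` + remote_code_snippet + ")");
if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
	pipelineSnippet.push("pipe(messages)");
}
console.log(pipelineSnippet.join("\n"));
// Expected output (a Python snippet):
// # Use a pipeline as a high-level helper
// from transformers import pipeline
//
// messages = [
//     {"role": "user", "content": "Who are you?"},
// ]
// pipe = pipeline("text-generation", model="my-org/chat-model")
// pipe(messages)
```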