@huggingface/tasks 0.19.30 → 0.19.32

This diff shows the contents of two package versions publicly released to one of the supported registries, as they appear in that registry, and is provided for informational purposes only.
package/…/local-apps.d.ts.map CHANGED
@@ -1 +1 @@ generated declaration sourcemap for local-apps.d.ts; only the machine-generated VLQ "mappings" string changed
- {"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"…"}
+ {"version":3,"file":"local-apps.d.ts","sourceRoot":"","sources":["../../src/local-apps.ts"],"names":[],"mappings":"…"}
package/…/local-apps.js CHANGED
@@ -131,10 +131,25 @@ curl -X POST "http://localhost:8000/v1/completions" \\
   "temperature": 0.5
 }'`;
     const runCommand = model.tags.includes("conversational") ? runCommandInstruct : runCommandNonInstruct;
+    let setup;
+    let dockerCommand;
+    if (model.tags.includes("mistral-common")) {
+        setup = [
+            "# Install vLLM from pip:",
+            "pip install vllm",
+            "# Make sure you have the latest version of mistral-common installed:",
+            "pip install --upgrade mistral-common",
+        ].join("\n");
+        dockerCommand = `# Load and run the model:\ndocker exec -it my_vllm_container bash -c "vllm serve ${model.id} --tokenizer_mode mistral --config_format mistral --load_format mistral --tool-call-parser mistral --enable-auto-tool-choice"`;
+    }
+    else {
+        setup = ["# Install vLLM from pip:", "pip install vllm"].join("\n");
+        dockerCommand = `# Load and run the model:\ndocker exec -it my_vllm_container bash -c "vllm serve ${model.id}"`;
+    }
     return [
         {
             title: "Install from pip",
-            setup: ["# Install vLLM from pip:", "pip install vllm"].join("\n"),
+            setup: setup,
             content: [`# Load and run the model:\nvllm serve "${model.id}"`, runCommand],
         },
         {
@@ -150,10 +165,7 @@ curl -X POST "http://localhost:8000/v1/completions" \\
                 ` vllm/vllm-openai:latest \\`,
                 ` --model ${model.id}`,
             ].join("\n"),
-            content: [
-                `# Load and run the model:\ndocker exec -it my_vllm_container bash -c "vllm serve ${model.id}"`,
-                runCommand,
-            ],
+            content: [dockerCommand, runCommand],
         },
     ];
};
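
In plain terms, this change teaches the vLLM snippet builder to special-case models tagged "mistral-common": the pip setup gains an upgrade step for the mistral-common package, and the Docker serve command gains Mistral-specific tokenizer/config/tool-call flags. Below is a minimal self-contained TypeScript sketch of the branching; the model object is a hypothetical stand-in for the real ModelData argument, and this is not the package's actual export.

// Sketch of the new branch; `model` is hypothetical — only `id` and `tags` are read here.
const model = { id: "mistralai/some-model", tags: ["mistral-common", "conversational"] };

const mistralFlags =
    " --tokenizer_mode mistral --config_format mistral --load_format mistral" +
    " --tool-call-parser mistral --enable-auto-tool-choice";

// Models tagged "mistral-common" get the extra flags appended to the serve command.
const useMistral = model.tags.includes("mistral-common");
const dockerCommand = `docker exec -it my_vllm_container bash -c "vllm serve ${model.id}${useMistral ? mistralFlags : ""}"`;

console.log(dockerCommand);
// docker exec -it my_vllm_container bash -c "vllm serve mistralai/some-model --tokenizer_mode mistral … --enable-auto-tool-choice"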
package/…/model-libraries.d.ts CHANGED
@@ -30,7 +30,7 @@ export interface LibraryUiElement {
      * Elastic query used to count this library's model downloads
      *
      * By default, those files are counted:
-     * "config.json", "config.yaml", "hyperparams.yaml", "meta.yaml"
+     * "config.json", "config.yaml", "hyperparams.yaml", "params.json", "meta.yaml"
      */
     countDownloads?: ElasticSearchQuery;
     /**
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@huggingface/tasks",
-  "version": "0.19.30",
+  "version": "0.19.32",
   "description": "List of ML tasks for huggingface.co/tasks",
   "repository": "https://github.com/huggingface/huggingface.js.git",
   "publishConfig": {
package/src/local-apps.ts CHANGED
@@ -207,10 +207,27 @@ curl -X POST "http://localhost:8000/v1/completions" \\
   "temperature": 0.5
 }'`;
 	const runCommand = model.tags.includes("conversational") ? runCommandInstruct : runCommandNonInstruct;
+
+	let setup;
+	let dockerCommand;
+
+	if (model.tags.includes("mistral-common")) {
+		setup = [
+			"# Install vLLM from pip:",
+			"pip install vllm",
+			"# Make sure you have the latest version of mistral-common installed:",
+			"pip install --upgrade mistral-common",
+		].join("\n");
+		dockerCommand = `# Load and run the model:\ndocker exec -it my_vllm_container bash -c "vllm serve ${model.id} --tokenizer_mode mistral --config_format mistral --load_format mistral --tool-call-parser mistral --enable-auto-tool-choice"`;
+	} else {
+		setup = ["# Install vLLM from pip:", "pip install vllm"].join("\n");
+		dockerCommand = `# Load and run the model:\ndocker exec -it my_vllm_container bash -c "vllm serve ${model.id}"`;
+	}
+
 	return [
 		{
 			title: "Install from pip",
-			setup: ["# Install vLLM from pip:", "pip install vllm"].join("\n"),
+			setup: setup,
 			content: [`# Load and run the model:\nvllm serve "${model.id}"`, runCommand],
 		},
 		{
@@ -226,10 +243,7 @@ curl -X POST "http://localhost:8000/v1/completions" \\
 			` vllm/vllm-openai:latest \\`,
 			` --model ${model.id}`,
 		].join("\n"),
-		content: [
-			`# Load and run the model:\ndocker exec -it my_vllm_container bash -c "vllm serve ${model.id}"`,
-			runCommand,
-		],
+		content: [dockerCommand, runCommand],
 		},
 	];
};
@@ -2023,4 +2023,5 @@ audio = model.autoencoder.decode(codes)[0].cpu()
 torchaudio.save("sample.wav", audio, model.autoencoder.sampling_rate)
 `,
 ];
+
 //#endregion
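
For consumers of the package, the new behavior surfaces through the snippet builder rather than any new export. A hedged usage sketch follows, assuming LOCAL_APPS and the ModelData type are importable from the package root as in previous releases; the model values are hypothetical.

import { LOCAL_APPS, type ModelData } from "@huggingface/tasks";

// Hypothetical model entry; only `id` and `tags` influence the vLLM snippet.
const model = {
	id: "mistralai/some-model",
	tags: ["mistral-common", "conversational"],
} as ModelData;

// The vLLM entry returns LocalAppSnippet[]: { title, setup?, content }.
for (const snippet of LOCAL_APPS.vllm.snippet(model)) {
	console.log(`== ${snippet.title} ==`);
	if (snippet.setup) console.log(snippet.setup);
	console.log(snippet.content);
}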
package/src/model-libraries.ts CHANGED
@@ -32,7 +32,7 @@ export interface LibraryUiElement {
 	 * Elastic query used to count this library's model downloads
 	 *
 	 * By default, those files are counted:
-	 * "config.json", "config.yaml", "hyperparams.yaml", "meta.yaml"
+	 * "config.json", "config.yaml", "hyperparams.yaml", "params.json", "meta.yaml"
 	 */
 	countDownloads?: ElasticSearchQuery;
 	/**
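
This is the source-side counterpart of the declaration-file change above: "params.json" joins the files counted by default when a library does not set countDownloads. Libraries that need different counting pass their own Elasticsearch query; below is an illustrative, hypothetical entry, assuming the term-on-path query shape used by existing registrations (the label, repo, and file name are made up).

// Hypothetical LibraryUiElement-style entry; names and URL are illustrative only.
const myLibraryUi = {
	prettyLabel: "My Library",
	repoName: "my-library",
	repoUrl: "https://github.com/example/my-library",
	// Count a hit whenever this file is downloaded, instead of the defaults
	// ("config.json", "config.yaml", "hyperparams.yaml", "params.json", "meta.yaml"):
	countDownloads: { term: { path: "my_library_config.json" } },
};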