@huggingface/transformers 4.0.0-next.3 → 4.0.0-next.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +4 -3
- package/dist/transformers.js +18 -1
- package/dist/transformers.min.js +18 -18
- package/dist/transformers.node.cjs +21 -1
- package/dist/transformers.node.min.cjs +18 -18
- package/dist/transformers.node.min.mjs +18 -18
- package/dist/transformers.node.mjs +18 -1
- package/dist/transformers.web.js +18 -1
- package/dist/transformers.web.min.js +14 -14
- package/package.json +1 -1
- package/src/configs.js +1 -0
- package/src/env.js +1 -1
- package/src/models/cohere2/modeling_cohere2.js +5 -0
- package/src/models/models.js +1 -0
- package/src/models/registry.js +2 -0
- package/src/transformers.js +2 -0
- package/types/configs.d.ts.map +1 -1
- package/types/models/cohere2/modeling_cohere2.d.ts +8 -0
- package/types/models/cohere2/modeling_cohere2.d.ts.map +1 -0
- package/types/models/models.d.ts +1 -0
- package/types/models/registry.d.ts.map +1 -1
- package/types/transformers.d.ts +2 -0
- package/types/transformers.d.ts.map +1 -1
package/README.md
CHANGED
|
@@ -47,7 +47,7 @@ npm i @huggingface/transformers
|
|
|
47
47
|
Alternatively, you can use it in vanilla JS, without any bundler, by using a CDN or static hosting. For example, using [ES Modules](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Modules), you can import the library with:
|
|
48
48
|
```html
|
|
49
49
|
<script type="module">
|
|
50
|
-
import { pipeline } from 'https://cdn.jsdelivr.net/npm/@huggingface/transformers@4.0.0-next.3';
|
|
50
|
+
import { pipeline } from 'https://cdn.jsdelivr.net/npm/@huggingface/transformers@4.0.0-next.4';
|
|
51
51
|
</script>
|
|
52
52
|
```
|
|
53
53
|
|
|
@@ -134,7 +134,7 @@ Ready to dive in? Explore our wide variety of demo applications and templates [h
|
|
|
134
134
|
|
|
135
135
|
|
|
136
136
|
|
|
137
|
-
By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models?library=transformers.js) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@huggingface/transformers@4.0.0-next.3/dist/), which should work out-of-the-box. You can customize this as follows:
|
|
137
|
+
By default, Transformers.js uses [hosted pretrained models](https://huggingface.co/models?library=transformers.js) and [precompiled WASM binaries](https://cdn.jsdelivr.net/npm/@huggingface/transformers@4.0.0-next.4/dist/), which should work out-of-the-box. You can customize this as follows:
|
|
138
138
|
|
|
139
139
|
### Settings
|
|
140
140
|
|
|
@@ -256,7 +256,8 @@ To find compatible models on the Hub, select the "transformers.js" library tag i
|
|
|
256
256
|
1. **[CLIPSeg](https://huggingface.co/docs/transformers/model_doc/clipseg)** (from University of Göttingen) released with the paper [Image Segmentation Using Text and Image Prompts](https://huggingface.co/papers/2112.10003) by Timo Lüddecke and Alexander Ecker.
|
|
257
257
|
1. **[CodeGen](https://huggingface.co/docs/transformers/model_doc/codegen)** (from Salesforce) released with the paper [A Conversational Paradigm for Program Synthesis](https://huggingface.co/papers/2203.13474) by Erik Nijkamp, Bo Pang, Hiroaki Hayashi, Lifu Tu, Huan Wang, Yingbo Zhou, Silvio Savarese, Caiming Xiong.
|
|
258
258
|
1. **[CodeLlama](https://huggingface.co/docs/transformers/model_doc/llama_code)** (from MetaAI) released with the paper [Code Llama: Open Foundation Models for Code](https://ai.meta.com/research/publications/code-llama-open-foundation-models-for-code/) by Baptiste Rozière, Jonas Gehring, Fabian Gloeckle, Sten Sootla, Itai Gat, Xiaoqing Ellen Tan, Yossi Adi, Jingyu Liu, Tal Remez, Jérémy Rapin, Artyom Kozhevnikov, Ivan Evtimov, Joanna Bitton, Manish Bhatt, Cristian Canton Ferrer, Aaron Grattafiori, Wenhan Xiong, Alexandre Défossez, Jade Copet, Faisal Azhar, Hugo Touvron, Louis Martin, Nicolas Usunier, Thomas Scialom, Gabriel Synnaeve.
|
|
259
|
-
1. **[Cohere](https://huggingface.co/docs/transformers/main/model_doc/cohere)** (from Cohere) released with the blog post [Command-R: Retrieval Augmented Generation at Production Scale](https://cohere.com/blog/command-r) by Cohere.
|
|
259
|
+
1. **[Cohere](https://huggingface.co/docs/transformers/main/model_doc/cohere)** (from Cohere) released with the blog post [Command-R: Retrieval Augmented Generation at Production Scale](https://cohere.com/blog/command-r) by Cohere.
|
|
260
|
+
1. **[Cohere2](https://huggingface.co/docs/transformers/main/model_doc/cohere2)** (from Cohere) released with the blog post [Introducing Command R7B: Fast and efficient generative AI](https://cohere.com/blog/command-r7b) by Cohere.
|
|
260
261
|
1. **[ConvBERT](https://huggingface.co/docs/transformers/model_doc/convbert)** (from YituTech) released with the paper [ConvBERT: Improving BERT with Span-based Dynamic Convolution](https://huggingface.co/papers/2008.02496) by Zihang Jiang, Weihao Yu, Daquan Zhou, Yunpeng Chen, Jiashi Feng, Shuicheng Yan.
|
|
261
262
|
1. **[ConvNeXT](https://huggingface.co/docs/transformers/model_doc/convnext)** (from Facebook AI) released with the paper [A ConvNet for the 2020s](https://huggingface.co/papers/2201.03545) by Zhuang Liu, Hanzi Mao, Chao-Yuan Wu, Christoph Feichtenhofer, Trevor Darrell, Saining Xie.
|
|
262
263
|
1. **[ConvNeXTV2](https://huggingface.co/docs/transformers/model_doc/convnextv2)** (from Facebook AI) released with the paper [ConvNeXt V2: Co-designing and Scaling ConvNets with Masked Autoencoders](https://huggingface.co/papers/2301.00808) by Sanghyun Woo, Shoubhik Debnath, Ronghang Hu, Xinlei Chen, Zhuang Liu, In So Kweon, Saining Xie.
|
package/dist/transformers.js
CHANGED
|
@@ -23,7 +23,7 @@ var emptyObj3 = {};
|
|
|
23
23
|
var node_url_default = emptyObj3;
|
|
24
24
|
|
|
25
25
|
// src/env.js
|
|
26
|
-
var VERSION = "4.0.0-next.3";
|
|
26
|
+
var VERSION = "4.0.0-next.4";
|
|
27
27
|
var IS_PROCESS_AVAILABLE = typeof process !== "undefined";
|
|
28
28
|
var IS_NODE_ENV = IS_PROCESS_AVAILABLE && process?.release?.name === "node";
|
|
29
29
|
var IS_FS_AVAILABLE = !isEmpty(node_fs_default);
|
|
@@ -20774,6 +20774,7 @@ function getNormalizedConfig(config) {
|
|
|
20774
20774
|
case "granite":
|
|
20775
20775
|
case "granitemoehybrid":
|
|
20776
20776
|
case "cohere":
|
|
20777
|
+
case "cohere2":
|
|
20777
20778
|
case "mistral":
|
|
20778
20779
|
case "starcoder2":
|
|
20779
20780
|
case "qwen2":
|
|
@@ -23853,6 +23854,9 @@ __export(models_exports, {
|
|
|
23853
23854
|
CodeGenForCausalLM: () => CodeGenForCausalLM,
|
|
23854
23855
|
CodeGenModel: () => CodeGenModel,
|
|
23855
23856
|
CodeGenPreTrainedModel: () => CodeGenPreTrainedModel,
|
|
23857
|
+
Cohere2ForCausalLM: () => Cohere2ForCausalLM,
|
|
23858
|
+
Cohere2Model: () => Cohere2Model,
|
|
23859
|
+
Cohere2PreTrainedModel: () => Cohere2PreTrainedModel,
|
|
23856
23860
|
CohereForCausalLM: () => CohereForCausalLM,
|
|
23857
23861
|
CohereModel: () => CohereModel,
|
|
23858
23862
|
CoherePreTrainedModel: () => CoherePreTrainedModel,
|
|
@@ -24850,6 +24854,14 @@ var CohereModel = class extends CoherePreTrainedModel {
|
|
|
24850
24854
|
var CohereForCausalLM = class extends CoherePreTrainedModel {
|
|
24851
24855
|
};
|
|
24852
24856
|
|
|
24857
|
+
// src/models/cohere2/modeling_cohere2.js
|
|
24858
|
+
var Cohere2PreTrainedModel = class extends PreTrainedModel {
|
|
24859
|
+
};
|
|
24860
|
+
var Cohere2Model = class extends Cohere2PreTrainedModel {
|
|
24861
|
+
};
|
|
24862
|
+
var Cohere2ForCausalLM = class extends Cohere2PreTrainedModel {
|
|
24863
|
+
};
|
|
24864
|
+
|
|
24853
24865
|
// src/models/convbert/modeling_convbert.js
|
|
24854
24866
|
var ConvBertPreTrainedModel = class extends PreTrainedModel {
|
|
24855
24867
|
};
|
|
@@ -28654,6 +28666,7 @@ var MODEL_MAPPING_NAMES_DECODER_ONLY = /* @__PURE__ */ new Map([
|
|
|
28654
28666
|
["granite", "GraniteModel"],
|
|
28655
28667
|
["granitemoehybrid", "GraniteMoeHybridModel"],
|
|
28656
28668
|
["cohere", "CohereModel"],
|
|
28669
|
+
["cohere2", "Cohere2Model"],
|
|
28657
28670
|
["gemma", "GemmaModel"],
|
|
28658
28671
|
["gemma2", "Gemma2Model"],
|
|
28659
28672
|
["vaultgemma", "VaultGemmaModel"],
|
|
@@ -28767,6 +28780,7 @@ var MODEL_FOR_CAUSAL_LM_MAPPING_NAMES = /* @__PURE__ */ new Map([
|
|
|
28767
28780
|
["granite", "GraniteForCausalLM"],
|
|
28768
28781
|
["granitemoehybrid", "GraniteMoeHybridForCausalLM"],
|
|
28769
28782
|
["cohere", "CohereForCausalLM"],
|
|
28783
|
+
["cohere2", "Cohere2ForCausalLM"],
|
|
28770
28784
|
["gemma", "GemmaForCausalLM"],
|
|
28771
28785
|
["gemma2", "Gemma2ForCausalLM"],
|
|
28772
28786
|
["vaultgemma", "VaultGemmaForCausalLM"],
|
|
@@ -31278,6 +31292,9 @@ export {
|
|
|
31278
31292
|
CodeGenPreTrainedModel,
|
|
31279
31293
|
CodeGenTokenizer,
|
|
31280
31294
|
CodeLlamaTokenizer,
|
|
31295
|
+
Cohere2ForCausalLM,
|
|
31296
|
+
Cohere2Model,
|
|
31297
|
+
Cohere2PreTrainedModel,
|
|
31281
31298
|
CohereForCausalLM,
|
|
31282
31299
|
CohereModel,
|
|
31283
31300
|
CoherePreTrainedModel,
|