@huggingface/tasks 0.0.8 → 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/package.json +1 -1
- package/src/tasks/audio-classification/about.md +1 -1
- package/src/tasks/audio-to-audio/about.md +1 -1
- package/src/tasks/automatic-speech-recognition/about.md +1 -1
- package/src/tasks/conversational/about.md +1 -1
- package/src/tasks/image-classification/about.md +1 -1
- package/src/tasks/image-segmentation/about.md +1 -1
- package/src/tasks/image-to-image/about.md +1 -1
- package/src/tasks/image-to-text/about.md +1 -1
- package/src/tasks/summarization/about.md +1 -1
- package/src/tasks/text-classification/about.md +1 -1
- package/src/tasks/text-generation/about.md +1 -1
- package/src/tasks/text-to-image/about.md +1 -1
- package/src/tasks/text-to-speech/about.md +1 -1
- package/src/tasks/translation/about.md +1 -1
package/LICENSE
CHANGED

@@ -18,4 +18,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+SOFTWARE.
package/package.json
CHANGED

package/src/tasks/audio-classification/about.md
CHANGED

@@ -53,7 +53,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.audioClassification({
   data: await (await fetch("sample.flac")).blob(),
   model: "facebook/mms-lid-126",
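
For reference, the corrected snippet reads end to end roughly as follows. This is a sketch assembled from the diff lines above; it assumes `HF_TOKEN` is a Hugging Face access token defined elsewhere and that the example simply closes the call after the `model` field.

```javascript
import { HfInference } from "@huggingface/inference";

// HF_TOKEN is assumed to be a Hugging Face access token defined elsewhere.
const inference = new HfInference(HF_TOKEN);

// Identify the spoken language of an audio clip with the MMS language-ID checkpoint.
const labels = await inference.audioClassification({
  data: await (await fetch("sample.flac")).blob(),
  model: "facebook/mms-lid-126",
});
// Each entry is a { label, score } pair.
console.log(labels);
```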

package/src/tasks/audio-to-audio/about.md
CHANGED

@@ -35,7 +35,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.audioToAudio({
   data: await (await fetch("sample.flac")).blob(),
   model: "speechbrain/sepformer-wham",

package/src/tasks/automatic-speech-recognition/about.md
CHANGED

@@ -54,7 +54,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to t
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.automaticSpeechRecognition({
   data: await (await fetch("sample.flac")).blob(),
   model: "openai/whisper-large-v2",

package/src/tasks/conversational/about.md
CHANGED

@@ -34,7 +34,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.conversational({
   model: "facebook/blenderbot-400M-distill",
   inputs: "Going to the movies tonight - any suggestions?",
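
The same pattern applies to the chat example. A minimal sketch of the completed call, assuming `HF_TOKEN` is defined elsewhere and the object literal is simply closed after `inputs`:

```javascript
import { HfInference } from "@huggingface/inference";

// HF_TOKEN is assumed to be defined elsewhere.
const inference = new HfInference(HF_TOKEN);

// Send a single user message to the BlenderBot checkpoint and read back its reply.
const response = await inference.conversational({
  model: "facebook/blenderbot-400M-distill",
  inputs: "Going to the movies tonight - any suggestions?",
});
console.log(response.generated_text);
```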

package/src/tasks/image-classification/about.md
CHANGED

@@ -29,7 +29,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to c
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.imageClassification({
   data: await (await fetch("https://picsum.photos/300/300")).blob(),
   model: "microsoft/resnet-50",
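
Again for reference, the image version of the snippet, completed under the same assumptions (`HF_TOKEN` defined elsewhere, closing braces added):

```javascript
import { HfInference } from "@huggingface/inference";

// HF_TOKEN is assumed to be defined elsewhere.
const inference = new HfInference(HF_TOKEN);

// Classify an image fetched from a URL; the result is a list of { label, score } pairs.
const result = await inference.imageClassification({
  data: await (await fetch("https://picsum.photos/300/300")).blob(),
  model: "microsoft/resnet-50",
});
console.log(result);
```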

package/src/tasks/image-segmentation/about.md
CHANGED

@@ -45,7 +45,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.imageSegmentation({
   data: await (await fetch("https://picsum.photos/300/300")).blob(),
   model: "facebook/detr-resnet-50-panoptic",

package/src/tasks/image-to-image/about.md
CHANGED

@@ -43,7 +43,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.imageToImage({
   data: await (await fetch("image")).blob(),
   model: "timbrooks/instruct-pix2pix",

package/src/tasks/image-to-text/about.md
CHANGED

@@ -48,7 +48,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.imageToText({
   data: await (await fetch("https://picsum.photos/300/300")).blob(),
   model: "Salesforce/blip-image-captioning-base",

package/src/tasks/summarization/about.md
CHANGED

@@ -25,7 +25,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 const inputs =
   "Paris is the capital and most populous city of France, with an estimated population of 2,175,601 residents as of 2018, in an area of more than 105 square kilometres (41 square miles). The City of Paris is the centre and seat of government of the region and province of Île-de-France, or Paris Region, which has an estimated population of 12,174,880, or about 18 percent of the population of France as of 2017.";
 
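
This hunk ends before the call itself, so only the client and the input text are visible. A minimal sketch of how such an example typically continues, assuming `HF_TOKEN` is defined elsewhere; the model name below is an illustrative choice, not taken from the diff:

```javascript
import { HfInference } from "@huggingface/inference";

// HF_TOKEN is assumed to be defined elsewhere.
const inference = new HfInference(HF_TOKEN);

// Input paragraph shortened here; the diff above shows the full text.
const inputs =
  "Paris is the capital and most populous city of France, with an estimated population of 2,175,601 residents as of 2018 ...";

// Summarize the paragraph. The model is a placeholder for illustration only.
const { summary_text } = await inference.summarization({
  model: "facebook/bart-large-cnn",
  inputs,
});
console.log(summary_text);
```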

package/src/tasks/text-classification/about.md
CHANGED

@@ -117,7 +117,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.conversational({
   model: "distilbert-base-uncased-finetuned-sst-2-english",
   inputs: "I love this movie!",
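
For the sentiment example, the library also exposes a dedicated `textClassification` method. A minimal sketch of calling the same model through it, assuming `HF_TOKEN` is defined elsewhere:

```javascript
import { HfInference } from "@huggingface/inference";

// HF_TOKEN is assumed to be defined elsewhere.
const inference = new HfInference(HF_TOKEN);

// Score the sentiment of a sentence; the result is a list of { label, score } pairs.
const scores = await inference.textClassification({
  model: "distilbert-base-uncased-finetuned-sst-2-english",
  inputs: "I love this movie!",
});
console.log(scores);
```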

package/src/tasks/text-generation/about.md
CHANGED

@@ -72,7 +72,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.conversational({
   model: "distilbert-base-uncased-finetuned-sst-2-english",
   inputs: "I love this movie!",

package/src/tasks/text-to-image/about.md
CHANGED

@@ -41,7 +41,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.textToImage({
   model: "stabilityai/stable-diffusion-2",
   inputs: "award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]",
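
For the generation example, a sketch of the completed call under the same assumptions; with `@huggingface/inference`, `textToImage` resolves to binary image data:

```javascript
import { HfInference } from "@huggingface/inference";

// HF_TOKEN is assumed to be defined elsewhere.
const inference = new HfInference(HF_TOKEN);

// Generate an image from the prompt; the call resolves to a Blob with the image bytes.
const image = await inference.textToImage({
  model: "stabilityai/stable-diffusion-2",
  inputs:
    "award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]",
});
```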

package/src/tasks/text-to-speech/about.md
CHANGED

@@ -47,7 +47,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.textToSpeech({
   model: "facebook/mms-tts",
   inputs: "text to generate speech from",
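
Likewise for speech synthesis; a sketch of the completed call, assuming `HF_TOKEN` is defined elsewhere:

```javascript
import { HfInference } from "@huggingface/inference";

// HF_TOKEN is assumed to be defined elsewhere.
const inference = new HfInference(HF_TOKEN);

// Synthesize speech for the given text; the call resolves to a Blob of audio data.
const audio = await inference.textToSpeech({
  model: "facebook/mms-tts",
  inputs: "text to generate speech from",
});
```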

package/src/tasks/translation/about.md
CHANGED

@@ -37,7 +37,7 @@ You can use [huggingface.js](https://github.com/huggingface/huggingface.js) to i
 ```javascript
 import { HfInference } from "@huggingface/inference";
 
-const inference = new HfInference(
+const inference = new HfInference(HF_TOKEN);
 await inference.translation({
   model: "t5-base",
   inputs: "My name is Wolfgang and I live in Berlin",
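
And the translation example, completed under the same assumptions (`HF_TOKEN` defined elsewhere, closing braces added):

```javascript
import { HfInference } from "@huggingface/inference";

// HF_TOKEN is assumed to be defined elsewhere.
const inference = new HfInference(HF_TOKEN);

// Translate the sentence; the response carries the translated text.
const result = await inference.translation({
  model: "t5-base",
  inputs: "My name is Wolfgang and I live in Berlin",
});
console.log(result.translation_text);
```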