@huggingface/inference 2.7.0 → 2.7.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +14 -12
- package/dist/index.js +14 -12
- package/dist/src/tasks/nlp/textGeneration.d.ts.map +1 -1
- package/package.json +2 -2
- package/src/tasks/nlp/textGeneration.ts +7 -4
package/dist/index.cjs
CHANGED
|
@@ -740,12 +740,22 @@ async function textClassification(args, options) {
|
|
|
740
740
|
return res;
|
|
741
741
|
}
|
|
742
742
|
|
|
743
|
+
// src/utils/toArray.ts
|
|
744
|
+
function toArray(obj) {
|
|
745
|
+
if (Array.isArray(obj)) {
|
|
746
|
+
return obj;
|
|
747
|
+
}
|
|
748
|
+
return [obj];
|
|
749
|
+
}
|
|
750
|
+
|
|
743
751
|
// src/tasks/nlp/textGeneration.ts
|
|
744
752
|
async function textGeneration(args, options) {
|
|
745
|
-
const res =
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
|
|
753
|
+
const res = toArray(
|
|
754
|
+
await request(args, {
|
|
755
|
+
...options,
|
|
756
|
+
taskHint: "text-generation"
|
|
757
|
+
})
|
|
758
|
+
);
|
|
749
759
|
const isValidOutput = Array.isArray(res) && res.every((x) => typeof x?.generated_text === "string");
|
|
750
760
|
if (!isValidOutput) {
|
|
751
761
|
throw new InferenceOutputError("Expected Array<{generated_text: string}>");
|
|
@@ -761,14 +771,6 @@ async function* textGenerationStream(args, options) {
|
|
|
761
771
|
});
|
|
762
772
|
}
|
|
763
773
|
|
|
764
|
-
// src/utils/toArray.ts
|
|
765
|
-
function toArray(obj) {
|
|
766
|
-
if (Array.isArray(obj)) {
|
|
767
|
-
return obj;
|
|
768
|
-
}
|
|
769
|
-
return [obj];
|
|
770
|
-
}
|
|
771
|
-
|
|
772
774
|
// src/tasks/nlp/tokenClassification.ts
|
|
773
775
|
async function tokenClassification(args, options) {
|
|
774
776
|
const res = toArray(
|
package/dist/index.js
CHANGED
|
@@ -687,12 +687,22 @@ async function textClassification(args, options) {
|
|
|
687
687
|
return res;
|
|
688
688
|
}
|
|
689
689
|
|
|
690
|
+
// src/utils/toArray.ts
|
|
691
|
+
function toArray(obj) {
|
|
692
|
+
if (Array.isArray(obj)) {
|
|
693
|
+
return obj;
|
|
694
|
+
}
|
|
695
|
+
return [obj];
|
|
696
|
+
}
|
|
697
|
+
|
|
690
698
|
// src/tasks/nlp/textGeneration.ts
|
|
691
699
|
async function textGeneration(args, options) {
|
|
692
|
-
const res =
|
|
693
|
-
|
|
694
|
-
|
|
695
|
-
|
|
700
|
+
const res = toArray(
|
|
701
|
+
await request(args, {
|
|
702
|
+
...options,
|
|
703
|
+
taskHint: "text-generation"
|
|
704
|
+
})
|
|
705
|
+
);
|
|
696
706
|
const isValidOutput = Array.isArray(res) && res.every((x) => typeof x?.generated_text === "string");
|
|
697
707
|
if (!isValidOutput) {
|
|
698
708
|
throw new InferenceOutputError("Expected Array<{generated_text: string}>");
|
|
@@ -708,14 +718,6 @@ async function* textGenerationStream(args, options) {
|
|
|
708
718
|
});
|
|
709
719
|
}
|
|
710
720
|
|
|
711
|
-
// src/utils/toArray.ts
|
|
712
|
-
function toArray(obj) {
|
|
713
|
-
if (Array.isArray(obj)) {
|
|
714
|
-
return obj;
|
|
715
|
-
}
|
|
716
|
-
return [obj];
|
|
717
|
-
}
|
|
718
|
-
|
|
719
721
|
// src/tasks/nlp/tokenClassification.ts
|
|
720
722
|
async function tokenClassification(args, options) {
|
|
721
723
|
const res = toArray(
|
|
package/dist/src/tasks/nlp/textGeneration.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"textGeneration.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/textGeneration.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAEpF,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;
|
|
1
|
+
{"version":3,"file":"textGeneration.d.ts","sourceRoot":"","sources":["../../../../src/tasks/nlp/textGeneration.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAEpF,OAAO,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AAIrD,YAAY,EAAE,mBAAmB,EAAE,oBAAoB,EAAE,CAAC;AAE1D;;GAEG;AACH,wBAAsB,cAAc,CACnC,IAAI,EAAE,QAAQ,GAAG,mBAAmB,EACpC,OAAO,CAAC,EAAE,OAAO,GACf,OAAO,CAAC,oBAAoB,CAAC,CAY/B"}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@huggingface/inference",
|
|
3
|
-
"version": "2.7.0",
|
|
3
|
+
"version": "2.7.1",
|
|
4
4
|
"packageManager": "pnpm@8.10.5",
|
|
5
5
|
"license": "MIT",
|
|
6
6
|
"author": "Tim Mikeladze <tim.mikeladze@gmail.com>",
|
|
@@ -39,7 +39,7 @@
|
|
|
39
39
|
},
|
|
40
40
|
"type": "module",
|
|
41
41
|
"dependencies": {
|
|
42
|
-
"@huggingface/tasks": "^0.
|
|
42
|
+
"@huggingface/tasks": "^0.11.2"
|
|
43
43
|
},
|
|
44
44
|
"devDependencies": {
|
|
45
45
|
"@types/node": "18.13.0"
|
|
package/src/tasks/nlp/textGeneration.ts
CHANGED
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import type { TextGenerationInput, TextGenerationOutput } from "@huggingface/tasks";
|
|
2
2
|
import { InferenceOutputError } from "../../lib/InferenceOutputError";
|
|
3
3
|
import type { BaseArgs, Options } from "../../types";
|
|
4
|
+
import { toArray } from "../../utils/toArray";
|
|
4
5
|
import { request } from "../custom/request";
|
|
5
6
|
|
|
6
7
|
export type { TextGenerationInput, TextGenerationOutput };
|
|
@@ -12,10 +13,12 @@ export async function textGeneration(
|
|
|
12
13
|
args: BaseArgs & TextGenerationInput,
|
|
13
14
|
options?: Options
|
|
14
15
|
): Promise<TextGenerationOutput> {
|
|
15
|
-
const res =
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
16
|
+
const res = toArray(
|
|
17
|
+
await request<TextGenerationOutput | TextGenerationOutput[]>(args, {
|
|
18
|
+
...options,
|
|
19
|
+
taskHint: "text-generation",
|
|
20
|
+
})
|
|
21
|
+
);
|
|
19
22
|
const isValidOutput = Array.isArray(res) && res.every((x) => typeof x?.generated_text === "string");
|
|
20
23
|
if (!isValidOutput) {
|
|
21
24
|
throw new InferenceOutputError("Expected Array<{generated_text: string}>");
|