@huggingface/inference 1.6.1 → 1.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js        CHANGED
package/dist/index.mjs       CHANGED
package/package.json         CHANGED
package/src/HfInference.ts   CHANGED
@@ -1,4 +1,4 @@
-import { toArray } from "./utils";
+import { toArray } from "./utils/to-array";
 
 export interface Options {
 	/**
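The import now points at a dedicated ./utils/to-array module. Its contents are not shown in this diff; purely as an assumption, a helper with that name usually looks like this minimal sketch:

// Hypothetical sketch only; the real body of ./utils/to-array is not part of this diff.
export function toArray<T>(value: T | T[]): T[] {
	return Array.isArray(value) ? value : [value];
}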
@@ -526,7 +526,7 @@ export class HfInference {
 	 * This task is well known to summarize longer text into shorter text. Be careful, some models have a maximum length of input. That means that the summary cannot handle full books for instance. Be careful when choosing your model.
 	 */
 	public async summarization(args: SummarizationArgs, options?: Options): Promise<SummarizationReturn> {
-		return (await this.request(args, options))?.[0];
+		return (await this.request<SummarizationReturn[]>(args, options))?.[0];
 	}
 
 	/**
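For reference, a hedged usage example of the summarization helper touched above; the token and model name are placeholders, not taken from the diff:

import { HfInference } from "@huggingface/inference";

// Placeholder API token and assumed example model.
const hf = new HfInference("hf_xxx");

async function summarize(text: string) {
	const result = await hf.summarization({
		model: "facebook/bart-large-cnn",
		inputs: text,
	});
	// result is typed as SummarizationReturn; the hunk above tightens the internal
	// this.request call so the trailing ?.[0] indexing is type-checked too.
	return result;
}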
@@ -550,14 +550,14 @@ export class HfInference {
 	 * Usually used for sentiment-analysis this will output the likelihood of classes of an input. Recommended model: distilbert-base-uncased-finetuned-sst-2-english
 	 */
 	public async textClassification(args: TextClassificationArgs, options?: Options): Promise<TextClassificationReturn> {
-		return (await this.request(args, options))?.[0];
+		return (await this.request<TextClassificationReturn[]>(args, options))?.[0];
 	}
 
 	/**
 	 * Use to continue text from a prompt. This is a very generic task. Recommended model: gpt2 (it’s a simple model, but fun to play with).
 	 */
 	public async textGeneration(args: TextGenerationArgs, options?: Options): Promise<TextGenerationReturn> {
-		return (await this.request(args, options))?.[0];
+		return (await this.request<TextGenerationReturn[]>(args, options))?.[0];
 	}
 
 	/**
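The call sites above all follow the same pattern: these Inference API endpoints respond with an array, so passing an explicit type parameter to this.request lets the trailing ?.[0] be checked against the declared return type. A self-contained sketch of the idea; the request function and type below are stand-ins for illustration, not the package's private implementation:

type TextClassificationOutput = Array<{ label: string; score: number }>;

// Assumed stand-in for a private request helper: POST the payload and trust the
// caller-supplied type parameter for the parsed JSON.
async function request<T>(url: string, body: unknown): Promise<T> {
	const res = await fetch(url, { method: "POST", body: JSON.stringify(body) });
	return (await res.json()) as T;
}

async function classify(text: string) {
	// The endpoint returns an array of results, hence T = TextClassificationOutput[].
	const out = await request<TextClassificationOutput[]>(
		"https://api-inference.huggingface.co/models/distilbert-base-uncased-finetuned-sst-2-english",
		{ inputs: text }
	);
	return out?.[0]; // typed as TextClassificationOutput | undefined, not any
}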
@@ -574,7 +574,7 @@ export class HfInference {
 	 * This task is well known to translate text from one language to another. Recommended model: Helsinki-NLP/opus-mt-ru-en.
 	 */
 	public async translation(args: TranslationArgs, options?: Options): Promise<TranslationReturn> {
-		return (await this.request(args, options))?.[0];
+		return (await this.request<TranslationReturn[]>(args, options))?.[0];
 	}
 
 	/**
@@ -0,0 +1,7 @@
+const isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined";
+
+const isWebWorker =
+	typeof self === "object" && self.constructor && self.constructor.name === "DedicatedWorkerGlobalScope";
+
+export const isFrontend = isBrowser || isWebWorker;
+export const isBackend = !isBrowser && !isWebWorker;
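The hunk above is a newly added file; its path is not shown in this extraction. A hedged example of how such environment predicates are typically consumed, with the import path assumed for illustration:

// The "./utils/env-predicates" path is an assumption, not taken from the diff.
import { isBackend, isFrontend } from "./utils/env-predicates";

if (isBackend) {
	// e.g. wire up a Node-specific fetch implementation here
}

if (isFrontend) {
	// e.g. rely on the fetch built into browsers and dedicated web workers
}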