@huggingface/tasks 0.11.11 → 0.11.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +146 -17
- package/dist/index.js +146 -17
- package/dist/src/dataset-libraries.d.ts +6 -0
- package/dist/src/dataset-libraries.d.ts.map +1 -1
- package/dist/src/hardware.d.ts +8 -0
- package/dist/src/hardware.d.ts.map +1 -1
- package/dist/src/model-libraries-snippets.d.ts +3 -0
- package/dist/src/model-libraries-snippets.d.ts.map +1 -1
- package/dist/src/model-libraries.d.ts +34 -2
- package/dist/src/model-libraries.d.ts.map +1 -1
- package/dist/src/pipelines.d.ts +12 -2
- package/dist/src/pipelines.d.ts.map +1 -1
- package/dist/src/snippets/curl.d.ts.map +1 -1
- package/dist/src/snippets/js.d.ts.map +1 -1
- package/dist/src/snippets/python.d.ts.map +1 -1
- package/dist/src/tasks/audio-classification/inference.d.ts +3 -2
- package/dist/src/tasks/audio-classification/inference.d.ts.map +1 -1
- package/dist/src/tasks/automatic-speech-recognition/inference.d.ts +3 -2
- package/dist/src/tasks/automatic-speech-recognition/inference.d.ts.map +1 -1
- package/dist/src/tasks/image-classification/inference.d.ts +3 -2
- package/dist/src/tasks/image-classification/inference.d.ts.map +1 -1
- package/dist/src/tasks/image-segmentation/inference.d.ts +10 -6
- package/dist/src/tasks/image-segmentation/inference.d.ts.map +1 -1
- package/dist/src/tasks/image-to-image/inference.d.ts +6 -5
- package/dist/src/tasks/image-to-image/inference.d.ts.map +1 -1
- package/dist/src/tasks/index.d.ts +1 -1
- package/dist/src/tasks/index.d.ts.map +1 -1
- package/dist/src/tasks/keypoint-detection/data.d.ts +4 -0
- package/dist/src/tasks/keypoint-detection/data.d.ts.map +1 -0
- package/dist/src/tasks/object-detection/inference.d.ts +17 -4
- package/dist/src/tasks/object-detection/inference.d.ts.map +1 -1
- package/dist/src/tasks/summarization/inference.d.ts +13 -12
- package/dist/src/tasks/summarization/inference.d.ts.map +1 -1
- package/dist/src/tasks/text-to-image/inference.d.ts +2 -2
- package/dist/src/tasks/translation/inference.d.ts +21 -10
- package/dist/src/tasks/translation/inference.d.ts.map +1 -1
- package/package.json +1 -1
- package/src/dataset-libraries.ts +6 -0
- package/src/hardware.ts +8 -0
- package/src/local-apps.ts +1 -1
- package/src/model-libraries-snippets.ts +87 -6
- package/src/model-libraries.ts +32 -0
- package/src/pipelines.ts +12 -0
- package/src/snippets/curl.ts +3 -6
- package/src/snippets/js.ts +1 -2
- package/src/snippets/python.ts +1 -2
- package/src/tasks/audio-classification/inference.ts +3 -2
- package/src/tasks/audio-classification/spec/input.json +2 -1
- package/src/tasks/audio-classification/spec/output.json +1 -0
- package/src/tasks/automatic-speech-recognition/inference.ts +3 -2
- package/src/tasks/automatic-speech-recognition/spec/input.json +2 -1
- package/src/tasks/common-definitions.json +3 -20
- package/src/tasks/image-classification/inference.ts +3 -2
- package/src/tasks/image-classification/spec/input.json +2 -1
- package/src/tasks/image-classification/spec/output.json +1 -0
- package/src/tasks/image-segmentation/inference.ts +10 -6
- package/src/tasks/image-segmentation/spec/input.json +3 -12
- package/src/tasks/image-segmentation/spec/output.json +4 -3
- package/src/tasks/image-to-image/inference.ts +6 -5
- package/src/tasks/image-to-image/spec/input.json +3 -2
- package/src/tasks/image-to-image/spec/output.json +1 -1
- package/src/tasks/index.ts +3 -6
- package/src/tasks/keypoint-detection/about.md +59 -0
- package/src/tasks/keypoint-detection/data.ts +46 -0
- package/src/tasks/object-detection/inference.ts +17 -4
- package/src/tasks/object-detection/spec/input.json +2 -1
- package/src/tasks/object-detection/spec/output.json +10 -6
- package/src/tasks/summarization/inference.ts +13 -12
- package/src/tasks/summarization/spec/input.json +37 -2
- package/src/tasks/text-classification/spec/output.json +1 -0
- package/src/tasks/text-to-image/inference.ts +2 -2
- package/src/tasks/text-to-image/spec/input.json +1 -1
- package/src/tasks/text-to-image/spec/output.json +1 -1
- package/src/tasks/translation/inference.ts +21 -10
- package/src/tasks/translation/spec/input.json +45 -2
- package/src/tasks/zero-shot-classification/spec/output.json +1 -0
package/dist/src/model-libraries.d.ts
CHANGED
@@ -146,6 +146,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         filter: false;
         countDownloads: string;
     };
+    deepforest: {
+        prettyLabel: string;
+        repoName: string;
+        docsUrl: string;
+        repoUrl: string;
+        countDownloads: string;
+    };
     "depth-anything-v2": {
         prettyLabel: string;
         repoName: string;
@@ -169,11 +176,29 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         snippets: (model: ModelData) => string[];
         filter: true;
     };
+    diffusionkit: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        snippets: (model: ModelData) => string[];
+    };
     doctr: {
         prettyLabel: string;
         repoName: string;
         repoUrl: string;
     };
+    cartesia_pytorch: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        snippets: (model: ModelData) => string[];
+    };
+    cartesia_mlx: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        snippets: (model: ModelData) => string[];
+    };
     edsnlp: {
         prettyLabel: string;
         repoName: string;
@@ -415,6 +440,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         snippets: (model: ModelData) => string[];
         filter: true;
     };
+    "py-feat": {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        docsUrl: string;
+        filter: false;
+    };
     pythae: {
         prettyLabel: string;
         repoName: string;
@@ -629,6 +661,6 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
     };
 };
 export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
-
export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
-
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "depth-anything-v2" | "diffree" | "diffusers" | "doctr" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
+
export declare const ALL_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
+
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "asteroid" | "audiocraft" | "audioseal" | "bertopic" | "big_vision" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "deepforest" | "depth-anything-v2" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hunyuan-dit" | "keras" | "tf-keras" | "keras-nlp" | "k2" | "liveportrait" | "llama-cpp-python" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "saelens" | "sam2" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "stable-baselines3" | "stanza" | "tensorflowtts" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "unity-sentis" | "voicecraft" | "whisperkit")[];
 //# sourceMappingURL=model-libraries.d.ts.map

package/dist/src/model-libraries.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B
+
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,6BAA6B,CAAC;AAEtE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgmBI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,4mCAAgE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,4mCAQ1B,CAAC"}
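The new library entries above (deepforest, diffusionkit, cartesia_pytorch, cartesia_mlx, py-feat) become ordinary members of `MODEL_LIBRARIES_UI_ELEMENTS` and of the `ALL_MODEL_LIBRARY_KEYS` union. A minimal consumer sketch, assuming these constants are re-exported from the package root as in the huggingface.js repository:

```ts
import { ALL_MODEL_LIBRARY_KEYS, MODEL_LIBRARIES_UI_ELEMENTS, type ModelLibraryKey } from "@huggingface/tasks";

// Look up display metadata for a library tag; the new keys need no special handling.
function describeLibrary(tag: string): string | undefined {
	if (!(ALL_MODEL_LIBRARY_KEYS as readonly string[]).includes(tag)) {
		return undefined;
	}
	const entry = MODEL_LIBRARIES_UI_ELEMENTS[tag as ModelLibraryKey];
	return `${entry.prettyLabel} (${entry.repoName})`;
}

console.log(describeLibrary("deepforest")); // pretty label and repo name come from the package data
```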

package/dist/src/pipelines.d.ts
CHANGED
@@ -391,6 +391,16 @@ export declare const PIPELINE_DATA: {
         name: string;
         modality: "multimodal";
         color: "blue";
+        hideInDatasets: false;
+    };
+    "keypoint-detection": {
+        name: string;
+        subtasks: {
+            type: string;
+            name: string;
+        }[];
+        modality: "cv";
+        color: "red";
         hideInDatasets: true;
     };
     other: {
@@ -403,7 +413,7 @@ export declare const PIPELINE_DATA: {
     };
 };
 export type PipelineType = keyof typeof PIPELINE_DATA;
 export type WidgetType = PipelineType | "conversational";
-
export declare const PIPELINE_TYPES: ("other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "text2text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text")[];
+
export declare const PIPELINE_TYPES: ("other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "text2text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text" | "keypoint-detection")[];
 export declare const SUBTASK_TYPES: string[];
-
export declare const PIPELINE_TYPES_SET: Set<"other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "text2text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text">;
+
export declare const PIPELINE_TYPES_SET: Set<"other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "text2text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text" | "keypoint-detection">;
 //# sourceMappingURL=pipelines.d.ts.map

package/dist/src/pipelines.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"pipelines.d.ts","sourceRoot":"","sources":["../../src/pipelines.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,UAAU,yEAA0E,CAAC;AAElG,MAAM,MAAM,QAAQ,GAAG,CAAC,OAAO,UAAU,CAAC,CAAC,MAAM,CAAC,CAAC;AAEnD,eAAO,MAAM,eAAe;;;;;;;;CAQQ,CAAC;AAErC;;;;;;GAMG;AACH,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACb;AAED;;;;;GAKG;AACH,MAAM,WAAW,YAAY;IAC5B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,QAAQ,EAAE,QAAQ,CAAC;IACnB;;OAEG;IACH,KAAK,EAAE,MAAM,GAAG,OAAO,GAAG,QAAQ,GAAG,QAAQ,GAAG,KAAK,GAAG,QAAQ,CAAC;IACjE;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB;AAcD,eAAO,MAAM,aAAa
+
{"version":3,"file":"pipelines.d.ts","sourceRoot":"","sources":["../../src/pipelines.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,UAAU,yEAA0E,CAAC;AAElG,MAAM,MAAM,QAAQ,GAAG,CAAC,OAAO,UAAU,CAAC,CAAC,MAAM,CAAC,CAAC;AAEnD,eAAO,MAAM,eAAe;;;;;;;;CAQQ,CAAC;AAErC;;;;;;GAMG;AACH,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACb;AAED;;;;;GAKG;AACH,MAAM,WAAW,YAAY;IAC5B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,QAAQ,EAAE,QAAQ,CAAC;IACnB;;OAEG;IACH,KAAK,EAAE,MAAM,GAAG,OAAO,GAAG,QAAQ,GAAG,QAAQ,GAAG,KAAK,GAAG,QAAQ,CAAC;IACjE;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB;AAcD,eAAO,MAAM,aAAa;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgmBc,CAAC;AAEzC,MAAM,MAAM,YAAY,GAAG,MAAM,OAAO,aAAa,CAAC;AAEtD,MAAM,MAAM,UAAU,GAAG,YAAY,GAAG,gBAAgB,CAAC;AAEzD,eAAO,MAAM,cAAc,ioCAA+C,CAAC;AAE3E,eAAO,MAAM,aAAa,UAEN,CAAC;AAErB,eAAO,MAAM,kBAAkB,koCAA0B,CAAC"}
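`keypoint-detection` is now part of `PIPELINE_DATA`, `PIPELINE_TYPES`, and `PIPELINE_TYPES_SET`. A small sketch of validating a task tag against the updated set, assuming the constants are exported from the package root:

```ts
import { PIPELINE_DATA, PIPELINE_TYPES_SET, type PipelineType } from "@huggingface/tasks";

// Narrow an arbitrary string to PipelineType; "keypoint-detection" now passes this check.
function asPipelineType(tag: string): PipelineType | undefined {
	return PIPELINE_TYPES_SET.has(tag as PipelineType) ? (tag as PipelineType) : undefined;
}

const task = asPipelineType("keypoint-detection");
if (task) {
	console.log(PIPELINE_DATA[task].name, PIPELINE_DATA[task].modality); // display name and "cv"
}
```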

package/dist/src/snippets/curl.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"curl.d.ts","sourceRoot":"","sources":["../../../src/snippets/curl.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEpD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAEnD,eAAO,MAAM,YAAY,UAAW,gBAAgB,eAAe,MAAM,KAAG,
+
{"version":3,"file":"curl.d.ts","sourceRoot":"","sources":["../../../src/snippets/curl.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEpD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAEnD,eAAO,MAAM,YAAY,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAKhB,CAAC;AAE7D,eAAO,MAAM,qBAAqB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAgBpF,CAAC;AAEF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAKjC,CAAC;AAE7D,eAAO,MAAM,WAAW,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAIf,CAAC;AAE7D,eAAO,MAAM,YAAY,EAAE,OAAO,CAAC,MAAM,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,KAAK,MAAM,CAAC,CAwBhH,CAAC;AAEF,wBAAgB,uBAAuB,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,CAI5F;AAED,wBAAgB,uBAAuB,CAAC,KAAK,EAAE,IAAI,CAAC,gBAAgB,EAAE,cAAc,CAAC,GAAG,OAAO,CAE9F"}

package/dist/src/snippets/js.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"js.d.ts","sourceRoot":"","sources":["../../../src/snippets/js.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEpD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAEnD,eAAO,MAAM,YAAY,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAmBxE,CAAC;AAEL,eAAO,MAAM,qBAAqB,UAAW,gBAAgB,eAAe,MAAM,KAAG,
+
{"version":3,"file":"js.d.ts","sourceRoot":"","sources":["../../../src/snippets/js.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEpD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAEnD,eAAO,MAAM,YAAY,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAmBxE,CAAC;AAEL,eAAO,MAAM,qBAAqB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAiBpF,CAAC;AACF,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAqBzF,CAAC;AAEL,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAkB9E,CAAC;AAEL,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAqCjF,CAAC;AAEF,eAAO,MAAM,WAAW,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAoBvE,CAAC;AAEL,eAAO,MAAM,UAAU,EAAE,OAAO,CAAC,MAAM,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,KAAK,MAAM,CAAC,CAwB9G,CAAC;AAEF,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,CAI1F;AAED,wBAAgB,qBAAqB,CAAC,KAAK,EAAE,gBAAgB,GAAG,OAAO,CAEtE"}

package/dist/src/snippets/python.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"python.d.ts","sourceRoot":"","sources":["../../../src/snippets/python.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEpD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAEnD,eAAO,MAAM,qBAAqB,UAAW,gBAAgB,eAAe,MAAM,KAAG,
+
{"version":3,"file":"python.d.ts","sourceRoot":"","sources":["../../../src/snippets/python.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAEpD,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAEnD,eAAO,MAAM,qBAAqB,UAAW,gBAAgB,eAAe,MAAM,KAAG,MAajC,CAAC;AAErD,eAAO,MAAM,6BAA6B,UAAW,gBAAgB,KAAG,MAQrE,CAAC;AAEJ,eAAO,MAAM,kCAAkC,UAAW,gBAAgB,KAAG,MAc1E,CAAC;AAEJ,eAAO,MAAM,YAAY,UAAW,gBAAgB,KAAG,MAOpD,CAAC;AAEJ,eAAO,MAAM,WAAW,UAAW,gBAAgB,KAAG,MAOP,CAAC;AAEhD,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,MAUjB,CAAC;AAE7C,eAAO,MAAM,cAAc,UAAW,gBAAgB,KAAG,MAMtD,CAAC;AAEJ,eAAO,MAAM,kBAAkB,UAAW,gBAAgB,KAAG,MA2B5D,CAAC;AAEF,eAAO,MAAM,gCAAgC,UAAW,gBAAgB,KAAG,MAUxE,CAAC;AAEJ,eAAO,MAAM,cAAc,EAAE,OAAO,CAAC,MAAM,CAAC,YAAY,EAAE,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,KAAK,MAAM,CAAC,CA4BlH,CAAC;AAEF,wBAAgB,yBAAyB,CAAC,KAAK,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,CAiB9F;AAED,wBAAgB,yBAAyB,CAAC,KAAK,EAAE,gBAAgB,GAAG,OAAO,CAE1E"}

package/dist/src/tasks/audio-classification/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface AudioClassificationInput {
     /**
-     * The input audio data
+     * The input audio data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the audio data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */

package/dist/src/tasks/audio-classification/inference.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/audio-classification/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACxC
+
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/audio-classification/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACxC;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,6BAA6B,CAAC;IAC3C,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;;;GAIG;AACH,MAAM,WAAW,6BAA6B;IAC7C,iBAAiB,CAAC,EAAE,6BAA6B,CAAC;IAClD;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;GAEG;AACH,MAAM,MAAM,6BAA6B,GAAG,SAAS,GAAG,SAAS,GAAG,MAAM,CAAC;AAC3E,MAAM,MAAM,yBAAyB,GAAG,gCAAgC,EAAE,CAAC;AAC3E;;GAEG;AACH,MAAM,WAAW,gCAAgC;IAChD;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B"}

package/dist/src/tasks/automatic-speech-recognition/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface AutomaticSpeechRecognitionInput {
     /**
-     * The input audio data
+     * The input audio data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the audio data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */

package/dist/src/tasks/automatic-speech-recognition/inference.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/automatic-speech-recognition/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;GAEG;AACH,MAAM,WAAW,+BAA+B;IAC/C
+
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/automatic-speech-recognition/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;GAEG;AACH,MAAM,WAAW,+BAA+B;IAC/C;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,oCAAoC,CAAC;IAClD,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;;;GAIG;AACH,MAAM,WAAW,oCAAoC;IACpD;;OAEG;IACH,QAAQ,CAAC,EAAE,oBAAoB,CAAC;IAChC;;OAEG;IACH,iBAAiB,CAAC,EAAE,OAAO,CAAC;IAC5B,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;;;GAIG;AACH,MAAM,WAAW,oBAAoB;IACpC;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;;;OAKG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;;;;;;;OAQG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;;OAGG;IACH,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB;;OAEG;IACH,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;;OAGG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf;;;;;;OAMG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;GAEG;AACH,MAAM,MAAM,kBAAkB,GAAG,OAAO,GAAG,OAAO,CAAC;AAEnD;;GAEG;AACH,MAAM,WAAW,gCAAgC;IAChD;;;OAGG;IACH,MAAM,CAAC,EAAE,qCAAqC,EAAE,CAAC;IACjD;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED,MAAM,WAAW,qCAAqC;IACrD;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,UAAU,EAAE,MAAM,EAAE,CAAC;IACrB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B"}

package/dist/src/tasks/image-classification/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface ImageClassificationInput {
     /**
-     * The input image data
+     * The input image data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the image data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */

package/dist/src/tasks/image-classification/inference.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/image-classification/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACxC
+
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/image-classification/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACxC;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,6BAA6B,CAAC;IAC3C,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;;;GAIG;AACH,MAAM,WAAW,6BAA6B;IAC7C,iBAAiB,CAAC,EAAE,6BAA6B,CAAC;IAClD;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;GAEG;AACH,MAAM,MAAM,6BAA6B,GAAG,SAAS,GAAG,SAAS,GAAG,MAAM,CAAC;AAC3E,MAAM,MAAM,yBAAyB,GAAG,gCAAgC,EAAE,CAAC;AAC3E;;GAEG;AACH,MAAM,WAAW,gCAAgC;IAChD;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B"}

package/dist/src/tasks/image-segmentation/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface ImageSegmentationInput {
     /**
-     * The input image data
+     * The input image data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the image data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */
@@ -41,6 +42,9 @@ export interface ImageSegmentationParameters {
     threshold?: number;
     [property: string]: unknown;
 }
+/**
+ * Segmentation task to be performed, depending on model capabilities.
+ */
 export type ImageSegmentationSubtask = "instance" | "panoptic" | "semantic";
 export type ImageSegmentationOutput = ImageSegmentationOutputElement[];
 /**
@@ -50,15 +54,15 @@ export type ImageSegmentationOutput = ImageSegmentationOutputElement[];
  */
 export interface ImageSegmentationOutputElement {
     /**
-     * The label of the predicted segment
+     * The label of the predicted segment.
      */
     label: string;
     /**
-     * The corresponding mask as a black-and-white image
+     * The corresponding mask as a black-and-white image (base64-encoded).
      */
-    mask:
+    mask: string;
     /**
-     * The score or confidence
+     * The score or confidence degree the model has.
      */
     score?: number;
     [property: string]: unknown;

package/dist/src/tasks/image-segmentation/inference.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/image-segmentation/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACtC
+
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/image-segmentation/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACtC;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,2BAA2B,CAAC;IACzC,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;;;GAIG;AACH,MAAM,WAAW,2BAA2B;IAC3C;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;OAEG;IACH,2BAA2B,CAAC,EAAE,MAAM,CAAC;IACrC;;OAEG;IACH,OAAO,CAAC,EAAE,wBAAwB,CAAC;IACnC;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;GAEG;AACH,MAAM,MAAM,wBAAwB,GAAG,UAAU,GAAG,UAAU,GAAG,UAAU,CAAC;AAC5E,MAAM,MAAM,uBAAuB,GAAG,8BAA8B,EAAE,CAAC;AACvE;;;;GAIG;AACH,MAAM,WAAW,8BAA8B;IAC9C;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B"}

package/dist/src/tasks/image-to-image/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface ImageToImageInput {
     /**
-     * The input image data
+     * The input image data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the image data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */
@@ -38,13 +39,13 @@ export interface ImageToImageParameters {
      */
     num_inference_steps?: number;
     /**
-     * The size in pixel of the output image
+     * The size in pixel of the output image.
      */
     target_size?: TargetSize;
     [property: string]: unknown;
 }
 /**
- * The size in pixel of the output image
+ * The size in pixel of the output image.
  */
 export interface TargetSize {
     height: number;
@@ -56,7 +57,7 @@ export interface TargetSize {
  */
 export interface ImageToImageOutput {
     /**
-     * The output image
+     * The output image returned as raw bytes in the payload.
      */
     image?: unknown;
     [property: string]: unknown;

package/dist/src/tasks/image-to-image/inference.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/image-to-image/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;GAEG;AACH,MAAM,WAAW,iBAAiB;IACjC
+
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/image-to-image/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;GAEG;AACH,MAAM,WAAW,iBAAiB;IACjC;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,sBAAsB,CAAC;IACpC,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;;;GAIG;AACH,MAAM,WAAW,sBAAsB;IACtC;;;OAGG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;OAEG;IACH,eAAe,CAAC,EAAE,MAAM,EAAE,CAAC;IAC3B;;;OAGG;IACH,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B;;OAEG;IACH,WAAW,CAAC,EAAE,UAAU,CAAC;IACzB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;GAEG;AACH,MAAM,WAAW,UAAU;IAC1B,MAAM,EAAE,MAAM,CAAC;IACf,KAAK,EAAE,MAAM,CAAC;IACd,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IAClC;;OAEG;IACH,KAAK,CAAC,EAAE,OAAO,CAAC;IAChB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B"}

package/dist/src/tasks/index.d.ts
CHANGED
@@ -18,7 +18,7 @@ export type * from "./table-question-answering/inference";
 export type { TextToImageInput, TextToImageOutput, TextToImageParameters } from "./text-to-image/inference";
 export type { TextToAudioParameters, TextToSpeechInput, TextToSpeechOutput } from "./text-to-speech/inference";
 export type * from "./token-classification/inference";
-
export type {
+
export type { TranslationInput, TranslationOutput } from "./translation/inference";
export type { ClassificationOutputTransform, TextClassificationInput, TextClassificationOutput, TextClassificationOutputElement, TextClassificationParameters, } from "./text-classification/inference";
export type { TextGenerationOutputFinishReason, TextGenerationOutputPrefillToken, TextGenerationInput, TextGenerationOutput, TextGenerationOutputDetails, TextGenerationInputGenerateParameters, TextGenerationOutputBestOfSequence, TextGenerationOutputToken, TextGenerationStreamOutputStreamDetails, TextGenerationStreamOutput, } from "./text-generation/inference";
 export type * from "./video-classification/inference";

package/dist/src/tasks/index.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/tasks/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AA0CjD,mBAAmB,kCAAkC,CAAC;AACtD,mBAAmB,0CAA0C,CAAC;AAC9D,YAAY,EACX,mBAAmB,EACnB,0BAA0B,EAC1B,oBAAoB,EACpB,4BAA4B,EAC5B,2BAA2B,EAC3B,0BAA0B,EAC1B,gCAAgC,EAChC,+BAA+B,GAC/B,MAAM,6BAA6B,CAAC;AACrC,mBAAmB,yCAAyC,CAAC;AAC7D,mBAAmB,gCAAgC,CAAC;AACpD,mBAAmB,uBAAuB,CAAC;AAC3C,YAAY,EACX,wBAAwB,EACxB,yBAAyB,EACzB,gCAAgC,EAChC,6BAA6B,GAC7B,MAAM,kCAAkC,CAAC;AAC1C,mBAAmB,4BAA4B,CAAC;AAChD,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,2BAA2B,CAAC;AAC5G,mBAAmB,gCAAgC,CAAC;AACpD,mBAAmB,8BAA8B,CAAC;AAClD,mBAAmB,8BAA8B,CAAC;AAClD,mBAAmB,gCAAgC,CAAC;AACpD,mBAAmB,iCAAiC,CAAC;AACrD,mBAAmB,2BAA2B,CAAC;AAC/C,mBAAmB,sCAAsC,CAAC;AAC1D,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,2BAA2B,CAAC;AAC5G,YAAY,EAAE,qBAAqB,EAAE,iBAAiB,EAAE,kBAAkB,EAAE,MAAM,4BAA4B,CAAC;AAC/G,mBAAmB,kCAAkC,CAAC;AACtD,YAAY,
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/tasks/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AA0CjD,mBAAmB,kCAAkC,CAAC;AACtD,mBAAmB,0CAA0C,CAAC;AAC9D,YAAY,EACX,mBAAmB,EACnB,0BAA0B,EAC1B,oBAAoB,EACpB,4BAA4B,EAC5B,2BAA2B,EAC3B,0BAA0B,EAC1B,gCAAgC,EAChC,+BAA+B,GAC/B,MAAM,6BAA6B,CAAC;AACrC,mBAAmB,yCAAyC,CAAC;AAC7D,mBAAmB,gCAAgC,CAAC;AACpD,mBAAmB,uBAAuB,CAAC;AAC3C,YAAY,EACX,wBAAwB,EACxB,yBAAyB,EACzB,gCAAgC,EAChC,6BAA6B,GAC7B,MAAM,kCAAkC,CAAC;AAC1C,mBAAmB,4BAA4B,CAAC;AAChD,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,2BAA2B,CAAC;AAC5G,mBAAmB,gCAAgC,CAAC;AACpD,mBAAmB,8BAA8B,CAAC;AAClD,mBAAmB,8BAA8B,CAAC;AAClD,mBAAmB,gCAAgC,CAAC;AACpD,mBAAmB,iCAAiC,CAAC;AACrD,mBAAmB,2BAA2B,CAAC;AAC/C,mBAAmB,sCAAsC,CAAC;AAC1D,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,2BAA2B,CAAC;AAC5G,YAAY,EAAE,qBAAqB,EAAE,iBAAiB,EAAE,kBAAkB,EAAE,MAAM,4BAA4B,CAAC;AAC/G,mBAAmB,kCAAkC,CAAC;AACtD,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,yBAAyB,CAAC;AACnF,YAAY,EACX,6BAA6B,EAC7B,uBAAuB,EACvB,wBAAwB,EACxB,+BAA+B,EAC/B,4BAA4B,GAC5B,MAAM,iCAAiC,CAAC;AACzC,YAAY,EACX,gCAAgC,EAChC,gCAAgC,EAChC,mBAAmB,EACnB,oBAAoB,EACpB,2BAA2B,EAC3B,qCAAqC,EACrC,kCAAkC,EAClC,yBAAyB,EACzB,uCAAuC,EACvC,0BAA0B,GAC1B,MAAM,6BAA6B,CAAC;AACrC,mBAAmB,kCAAkC,CAAC;AACtD,mBAAmB,uCAAuC,CAAC;AAC3D,mBAAmB,sCAAsC,CAAC;AAC1D,mBAAmB,4CAA4C,CAAC;AAChE,YAAY,EACX,WAAW,EACX,4BAA4B,EAC5B,gCAAgC,EAChC,6BAA6B,EAC7B,oCAAoC,GACpC,MAAM,wCAAwC,CAAC;AAEhD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AAE1D;;GAEG;AACH,eAAO,MAAM,qBAAqB,EAAE,MAAM,CAAC,YAAY,EAAE,eAAe,EAAE,CA4DzE,CAAC;AAoBF,eAAO,MAAM,UAAU,EAAE,MAAM,CAAC,YAAY,EAAE,QAAQ,GAAG,SAAS,CAoDxD,CAAC;AAEX,MAAM,WAAW,WAAW;IAC3B,WAAW,EAAE,MAAM,CAAC;IACpB,EAAE,EAAE,MAAM,CAAC;CACX;AAED,MAAM,MAAM,aAAa,GACtB;IACA,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,OAAO,CAAC;CACb,GACD;IACA,IAAI,EAAE,KAAK,CAAC;QACX,KAAK,EAAE,MAAM,CAAC;QACd,KAAK,EAAE,MAAM,CAAC;KACd,CAAC,CAAC;IACH,IAAI,EAAE,OAAO,CAAC;CACb,GACD;IACA,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,KAAK,CAAC;CACX,GACD;IACA,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC;IAClB,IAAI,EAAE,SAAS,CAAC;CACf,GACD;IACA,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;CACZ,GACD;IACA,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,KAAK,CAAC;QACb,GAAG,EAAE,MAAM,CAAC;QACZ,KAAK,EAAE,MAAM,CAAC;QACd,IAAI,EAAE,MAAM,CAAC;KACb,CAAC,CAAC;IACH,IAAI,EAAE,kBAAkB,CAAC;CACxB,CAAC;AAEL,MAAM,WAAW,QAAQ;IACxB,MAAM,EAAE,aAAa,EAAE,CAAC;IACxB,OAAO,EAAE,aAAa,EAAE,CAAC;CACzB;AAED,MAAM,WAAW,QAAQ;IACxB,QAAQ,EAAE,WAAW,EAAE,CAAC;IACxB,IAAI,EAAE,QAAQ,CAAC;IACf,EAAE,EAAE,YAAY,CAAC;IACjB,WAAW,CAAC,EAAE,YAAY,CAAC;IAC3B,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,EAAE,eAAe,EAAE,CAAC;IAC7B,OAAO,EAAE,WAAW,EAAE,CAAC;IACvB,MAAM,EAAE,WAAW,EAAE,CAAC;IACtB,MAAM,EAAE,WAAW,EAAE,CAAC;IACtB,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,EAAE,MAAM,EAAE,CAAC;IACvB,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,MAAM,cAAc,GAAG,IAAI,CAAC,QAAQ,EAAE,IAAI,GAAG,OAAO,GAAG,WAAW,CAAC,CAAC"}

package/dist/src/tasks/keypoint-detection/data.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"data.d.ts","sourceRoot":"","sources":["../../../../src/tasks/keypoint-detection/data.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,IAAI,CAAC;AAEzC,QAAA,MAAM,QAAQ,EAAE,cAyCf,CAAC;AAEF,eAAe,QAAQ,CAAC"}

package/dist/src/tasks/object-detection/inference.d.ts
CHANGED
@@ -8,9 +8,10 @@
  */
 export interface ObjectDetectionInput {
     /**
-     * The input image data
+     * The input image data as a base64-encoded string. If no `parameters` are provided, you can
+     * also provide the image data as a raw bytes payload.
      */
-    inputs:
+    inputs: string;
     /**
      * Additional inference parameters
      */
@@ -34,9 +35,21 @@ export interface ObjectDetectionParameters {
  * image.
  */
 export interface BoundingBox {
+    /**
+     * The x-coordinate of the bottom-right corner of the bounding box.
+     */
     xmax: number;
+    /**
+     * The x-coordinate of the top-left corner of the bounding box.
+     */
     xmin: number;
+    /**
+     * The y-coordinate of the bottom-right corner of the bounding box.
+     */
     ymax: number;
+    /**
+     * The y-coordinate of the top-left corner of the bounding box.
+     */
     ymin: number;
     [property: string]: unknown;
 }
@@ -51,11 +64,11 @@ export interface ObjectDetectionOutputElement {
      */
     box: BoundingBox;
     /**
-     * The predicted label for the bounding box
+     * The predicted label for the bounding box.
      */
     label: string;
     /**
-     * The associated score / probability
+     * The associated score / probability.
      */
     score: number;
     [property: string]: unknown;

package/dist/src/tasks/object-detection/inference.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/object-detection/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACpC
+
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/object-detection/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACpC;;;OAGG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,yBAAyB,CAAC;IACvC,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;;;GAIG;AACH,MAAM,WAAW,yBAAyB;IACzC;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;;GAGG;AACH,MAAM,WAAW,WAAW;IAC3B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD,MAAM,MAAM,qBAAqB,GAAG,4BAA4B,EAAE,CAAC;AACnE;;GAEG;AACH,MAAM,WAAW,4BAA4B;IAC5C;;;OAGG;IACH,GAAG,EAAE,WAAW,CAAC;IACjB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B"}

package/dist/src/tasks/summarization/inference.d.ts
CHANGED
@@ -5,43 +5,44 @@
  */
 /**
  * Inputs for Summarization inference
- *
- * Inputs for Text2text Generation inference
  */
 export interface SummarizationInput {
     /**
-     * The input text
+     * The input text to summarize.
      */
     inputs: string;
     /**
-     * Additional inference parameters
+     * Additional inference parameters.
      */
-    parameters?:
+    parameters?: SummarizationParameters;
     [property: string]: unknown;
 }
 /**
- * Additional inference parameters
+ * Additional inference parameters.
  *
- * Additional inference parameters for
+ * Additional inference parameters for summarization.
  */
-export interface
+export interface SummarizationParameters {
     /**
      * Whether to clean up the potential extra spaces in the text output.
      */
     clean_up_tokenization_spaces?: boolean;
     /**
-     * Additional parametrization of the text generation algorithm
+     * Additional parametrization of the text generation algorithm.
      */
     generate_parameters?: {
         [key: string]: unknown;
     };
     /**
-     * The truncation strategy to use
+     * The truncation strategy to use.
      */
-    truncation?:
+    truncation?: SummarizationTruncationStrategy;
     [property: string]: unknown;
 }
-
+/**
+ * The truncation strategy to use.
+ */
+export type SummarizationTruncationStrategy = "do_not_truncate" | "longest_first" | "only_first" | "only_second";
 /**
  * Outputs of inference for the Summarization task
  */

package/dist/src/tasks/summarization/inference.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/summarization/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH
+
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/summarization/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;GAEG;AACH,MAAM,WAAW,kBAAkB;IAClC;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,uBAAuB,CAAC;IACrC,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;;;GAIG;AACH,MAAM,WAAW,uBAAuB;IACvC;;OAEG;IACH,4BAA4B,CAAC,EAAE,OAAO,CAAC;IACvC;;OAEG;IACH,mBAAmB,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAA;KAAE,CAAC;IACjD;;OAEG;IACH,UAAU,CAAC,EAAE,+BAA+B,CAAC;IAC7C,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;GAEG;AACH,MAAM,MAAM,+BAA+B,GAAG,iBAAiB,GAAG,eAAe,GAAG,YAAY,GAAG,aAAa,CAAC;AAEjH;;GAEG;AACH,MAAM,WAAW,mBAAmB;IACnC;;OAEG;IACH,YAAY,EAAE,MAAM,CAAC;IACrB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B"}
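`SummarizationParameters` and `SummarizationTruncationStrategy` are now fully spelled out, so request bodies can be type-checked end to end. A sketch with illustrative values, assuming the types are re-exported from the package root:

```ts
import type { SummarizationInput } from "@huggingface/tasks";

// `truncation` must be one of the four SummarizationTruncationStrategy literals.
const payload: SummarizationInput = {
	inputs: "The tower is 324 metres tall, about the same height as an 81-storey building …",
	parameters: {
		clean_up_tokenization_spaces: true,
		truncation: "longest_first",
	},
};
```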

package/dist/src/tasks/text-to-image/inference.d.ts
CHANGED
@@ -8,7 +8,7 @@
  */
 export interface TextToImageInput {
     /**
-     * The input text data (sometimes called "prompt"
+     * The input text data (sometimes called "prompt")
      */
     inputs: string;
     /**
@@ -60,7 +60,7 @@ export interface TargetSize {
  */
 export interface TextToImageOutput {
     /**
-     * The generated image
+     * The generated image returned as raw bytes in the payload.
      */
     image: unknown;
     [property: string]: unknown;

package/dist/src/tasks/translation/inference.d.ts
CHANGED
@@ -5,43 +5,54 @@
  */
 /**
  * Inputs for Translation inference
- *
- * Inputs for Text2text Generation inference
 */
 export interface TranslationInput {
     /**
-     * The
+     * The text to translate.
      */
     inputs: string;
     /**
      * Additional inference parameters
      */
-    parameters?:
+    parameters?: TranslationParameters;
     [property: string]: unknown;
 }
 /**
  * Additional inference parameters
 *
- * Additional inference parameters for
+ * Additional inference parameters for Translation
 */
-export interface
+export interface TranslationParameters {
     /**
      * Whether to clean up the potential extra spaces in the text output.
      */
     clean_up_tokenization_spaces?: boolean;
     /**
-     * Additional parametrization of the text generation algorithm
+     * Additional parametrization of the text generation algorithm.
      */
     generate_parameters?: {
         [key: string]: unknown;
     };
     /**
-     * The
+     * The source language of the text. Required for models that can translate from multiple
+     * languages.
+     */
+    src_lang?: string;
+    /**
+     * Target language to translate to. Required for models that can translate to multiple
+     * languages.
      */
-
+    tgt_lang?: string;
+    /**
+     * The truncation strategy to use.
+     */
+    truncation?: TranslationTruncationStrategy;
     [property: string]: unknown;
 }
-
+/**
+ * The truncation strategy to use.
+ */
+export type TranslationTruncationStrategy = "do_not_truncate" | "longest_first" | "only_first" | "only_second";
 /**
  * Outputs of inference for the Translation task
  */

package/dist/src/tasks/translation/inference.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/translation/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH
+
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/translation/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,qBAAqB,CAAC;IACnC,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;;;GAIG;AACH,MAAM,WAAW,qBAAqB;IACrC;;OAEG;IACH,4BAA4B,CAAC,EAAE,OAAO,CAAC;IACvC;;OAEG;IACH,mBAAmB,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAA;KAAE,CAAC;IACjD;;;OAGG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;;OAGG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,UAAU,CAAC,EAAE,6BAA6B,CAAC;IAC3C,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AAED;;GAEG;AACH,MAAM,MAAM,6BAA6B,GAAG,iBAAiB,GAAG,eAAe,GAAG,YAAY,GAAG,aAAa,CAAC;AAE/G;;GAEG;AACH,MAAM,WAAW,iBAAiB;IACjC;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;IACzB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B"}
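The translation parameters gain `src_lang`, `tgt_lang`, and a typed `truncation` field. A sketch of a request body for a multilingual model; the NLLB-style language codes are illustrative, not required by the type:

```ts
import type { TranslationInput } from "@huggingface/tasks";

// src_lang / tgt_lang are only needed for models that translate between many languages.
const payload: TranslationInput = {
	inputs: "Bonjour, comment allez-vous ?",
	parameters: {
		src_lang: "fra_Latn",
		tgt_lang: "eng_Latn",
		truncation: "do_not_truncate",
	},
};
```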
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@huggingface/tasks",
   "packageManager": "pnpm@8.10.5",
-  "version": "0.11.
+  "version": "0.11.13",
   "description": "List of ML tasks for huggingface.co/tasks",
   "repository": "https://github.com/huggingface/huggingface.js.git",
   "publishConfig": {
package/src/dataset-libraries.ts
CHANGED
@@ -77,6 +77,12 @@ export const DATASET_LIBRARIES_UI_ELEMENTS = {
 		repoUrl: "https://github.com/pola-rs/polars",
 		docsUrl: "https://huggingface.co/docs/hub/datasets-polars",
 	},
+	duckdb: {
+		prettyLabel: "DuckDB",
+		repoName: "duckdb",
+		repoUrl: "https://github.com/duckdb/duckdb",
+		docsUrl: "https://huggingface.co/docs/hub/datasets-duckdb",
+	},
 } satisfies Record<string, DatasetLibraryUiElement>;
 
 /// List of the dataset libraries supported by the Hub
package/src/hardware.ts
CHANGED
@@ -164,6 +164,14 @@ export const SKUS = {
 			tflops: 12.74,
 			memory: [12, 8],
 		},
+		"RTX 2080 Ti": {
+			tflops: 26.9,
+			memory: [11],
+		},
+		"RTX 2080": {
+			tflops: 20.14,
+			memory: [8],
+		},
 		"RTX 2070": {
 			tflops: 14.93,
 			memory: [8],
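The two new SKUs carry the same `tflops`/`memory` shape as the existing entries. A rough sizing sketch; it assumes the `SKUS.GPU.NVIDIA` nesting used in the package source and fp16 weights at 2 bytes per parameter, ignoring activation memory:

```ts
import { SKUS } from "@huggingface/tasks";

// Check whether a model with `numParamsB` billion parameters fits in the card's VRAM (GB).
function fitsInMemory(sku: "RTX 2080 Ti" | "RTX 2080", numParamsB: number): boolean {
	const { memory } = SKUS.GPU.NVIDIA[sku];
	return Math.max(...memory) >= numParamsB * 2;
}

console.log(fitsInMemory("RTX 2080 Ti", 3)); // 3B params ≈ 6 GB in fp16 → true on an 11 GB card
```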
package/src/local-apps.ts
CHANGED
@@ -237,7 +237,7 @@ export const LOCAL_APPS = {
 		mainTask: "text-to-image",
 		macOSOnly: true,
 		displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
-		deeplink: (model) => new URL(`diffusionbee
+		deeplink: (model) => new URL(`https://diffusionbee.com/huggingface_import?model_id=${model.id}`),
 	},
 	joyfusion: {
 		prettyLabel: "JoyFusion",
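With the corrected deeplink the DiffusionBee entry now yields a regular https URL. A usage sketch; the `diffusionbee` key and the minimal `ModelData` stand-in are assumptions for illustration:

```ts
import { LOCAL_APPS, type ModelData } from "@huggingface/tasks";

// A minimal stand-in; a real ModelData object carries many more fields.
const model = { id: "stabilityai/stable-diffusion-xl-base-1.0" } as ModelData;
console.log(LOCAL_APPS.diffusionbee.deeplink?.(model).toString());
// e.g. https://diffusionbee.com/huggingface_import?model_id=stabilityai/stable-diffusion-xl-base-1.0
```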