@huggingface/tasks 0.19.21 → 0.19.23
- package/dist/commonjs/library-to-tasks.d.ts.map +1 -1
- package/dist/commonjs/library-to-tasks.js +1 -8
- package/dist/commonjs/model-libraries-snippets.d.ts +3 -0
- package/dist/commonjs/model-libraries-snippets.d.ts.map +1 -1
- package/dist/commonjs/model-libraries-snippets.js +36 -4
- package/dist/commonjs/model-libraries.d.ts +29 -1
- package/dist/commonjs/model-libraries.d.ts.map +1 -1
- package/dist/commonjs/model-libraries.js +28 -0
- package/dist/commonjs/pipelines.d.ts +1 -9
- package/dist/commonjs/pipelines.d.ts.map +1 -1
- package/dist/commonjs/pipelines.js +4 -6
- package/dist/commonjs/snippets/inputs.d.ts.map +1 -1
- package/dist/commonjs/snippets/inputs.js +0 -2
- package/dist/commonjs/tasks/index.d.ts +1 -1
- package/dist/commonjs/tasks/index.d.ts.map +1 -1
- package/dist/commonjs/tasks/index.js +0 -2
- package/dist/commonjs/tasks/placeholder/data.js +1 -1
- package/dist/commonjs/tasks/summarization/data.js +1 -1
- package/dist/commonjs/tasks/translation/data.js +1 -1
- package/dist/esm/library-to-tasks.d.ts.map +1 -1
- package/dist/esm/library-to-tasks.js +1 -8
- package/dist/esm/model-libraries-snippets.d.ts +3 -0
- package/dist/esm/model-libraries-snippets.d.ts.map +1 -1
- package/dist/esm/model-libraries-snippets.js +31 -2
- package/dist/esm/model-libraries.d.ts +29 -1
- package/dist/esm/model-libraries.d.ts.map +1 -1
- package/dist/esm/model-libraries.js +28 -0
- package/dist/esm/pipelines.d.ts +1 -9
- package/dist/esm/pipelines.d.ts.map +1 -1
- package/dist/esm/pipelines.js +4 -6
- package/dist/esm/snippets/inputs.d.ts.map +1 -1
- package/dist/esm/snippets/inputs.js +0 -2
- package/dist/esm/tasks/index.d.ts +1 -1
- package/dist/esm/tasks/index.d.ts.map +1 -1
- package/dist/esm/tasks/index.js +0 -2
- package/dist/esm/tasks/placeholder/data.js +1 -1
- package/dist/esm/tasks/summarization/data.js +1 -1
- package/dist/esm/tasks/translation/data.js +1 -1
- package/package.json +1 -1
- package/src/library-to-tasks.ts +1 -8
- package/src/model-libraries-snippets.ts +34 -2
- package/src/model-libraries.ts +28 -0
- package/src/pipelines.ts +4 -6
- package/src/snippets/inputs.ts +0 -3
- package/src/tasks/index.ts +1 -2
- package/src/tasks/placeholder/data.ts +1 -1
- package/src/tasks/summarization/data.ts +1 -1
- package/src/tasks/text-generation/about.md +2 -2
- package/src/tasks/translation/data.ts +1 -1
- package/dist/commonjs/tasks/text2text-generation/inference.d.ts +0 -52
- package/dist/commonjs/tasks/text2text-generation/inference.d.ts.map +0 -1
- package/dist/commonjs/tasks/text2text-generation/inference.js +0 -2
- package/dist/esm/tasks/text2text-generation/inference.d.ts +0 -52
- package/dist/esm/tasks/text2text-generation/inference.d.ts.map +0 -1
- package/dist/esm/tasks/text2text-generation/inference.js +0 -1
- package/src/tasks/text2text-generation/inference.ts +0 -51
- package/src/tasks/text2text-generation/spec/input.json +0 -54
- package/src/tasks/text2text-generation/spec/output.json +0 -14
@@ -656,6 +656,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         filter: false;
         countDownloads: string;
     };
+    mtvcraft: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        filter: false;
+        countDownloads: string;
+    };
     nemo: {
         prettyLabel: string;
         repoName: string;
@@ -701,6 +708,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         filter: true;
         countDownloads: string;
     };
+    PaddleOCR: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        snippets: (model: ModelData) => string[];
+        filter: true;
+    };
     peft: {
         prettyLabel: string;
         repoName: string;
@@ -775,6 +789,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         filter: false;
         countDownloads: string;
     };
+    renderformer: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        snippets: (model: ModelData) => string[];
+        filter: false;
+    };
     reverb: {
         prettyLabel: string;
         repoName: string;
@@ -1077,6 +1098,13 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
         countDownloads: string;
         snippets: (model: ModelData) => string[];
     };
+    videoprism: {
+        prettyLabel: string;
+        repoName: string;
+        repoUrl: string;
+        countDownloads: string;
+        snippets: (model: ModelData) => string[];
+    };
     "vfi-mamba": {
         prettyLabel: string;
         repoName: string;
@@ -1140,5 +1168,5 @@ export declare const MODEL_LIBRARIES_UI_ELEMENTS: {
 };
 export type ModelLibraryKey = keyof typeof MODEL_LIBRARIES_UI_ELEMENTS;
 export declare const ALL_MODEL_LIBRARY_KEYS: ModelLibraryKey[];
-
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("acestep" | "adapter-transformers" | "allennlp" | "anemoi" | "araclip" | "asteroid" | "audiocraft" | "audioseal" | "bagel-mot" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chatterbox" | "chat_tts" | "colpali" | "comet" | "contexttab" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "describe-anything" | "dia-tts" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "geometry-crafter" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hezar" | "htrflow" | "hunyuan-dit" | "hunyuan3d-2" | "imstoucan" | "index-tts" | "infinite-you" | "keras" | "tf-keras" | "keras-hub" | "kimi-audio" | "k2" | "lightning-ir" | "litert-lm" | "lerobot" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "magi-1" | "mamba-ssm" | "mars5-tts" | "matanyone" | "mesh-anything" | "merlin" | "medvae" | "mitie" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open-oasis" | "open_clip" | "open-sora" | "outetts" | "paddlenlp" | "peft" | "perception-encoder" | "phantom-wan" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "seedvr" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "monkeyocr" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "swarmformer" | "f5-tts" | "genmo" | "tencent-song-generation" | "tensorflowtts" | "tabpfn" | "terratorch" | "tic-clip" | "timesfm" | "timm" | "tirex" | "torchgeo" | "transformers" | "transformers.js" | "trellis" | "ultralytics" | "univa" | "uni-3dar" | "unity-sentis" | "sana" | "vfi-mamba" | "voicecraft" | "vui" | "wham" | "whisperkit" | "yolov10" | "zonos" | "3dtopia-xl")[];
+
export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("acestep" | "adapter-transformers" | "allennlp" | "anemoi" | "araclip" | "asteroid" | "audiocraft" | "audioseal" | "bagel-mot" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chatterbox" | "chat_tts" | "colpali" | "comet" | "contexttab" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "describe-anything" | "dia-tts" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "geometry-crafter" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hezar" | "htrflow" | "hunyuan-dit" | "hunyuan3d-2" | "imstoucan" | "index-tts" | "infinite-you" | "keras" | "tf-keras" | "keras-hub" | "kimi-audio" | "k2" | "lightning-ir" | "litert-lm" | "lerobot" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "magi-1" | "mamba-ssm" | "mars5-tts" | "matanyone" | "mesh-anything" | "merlin" | "medvae" | "mitie" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "mtvcraft" | "nemo" | "open-oasis" | "open_clip" | "open-sora" | "outetts" | "paddlenlp" | "PaddleOCR" | "peft" | "perception-encoder" | "phantom-wan" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "renderformer" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "seedvr" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "monkeyocr" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "swarmformer" | "f5-tts" | "genmo" | "tencent-song-generation" | "tensorflowtts" | "tabpfn" | "terratorch" | "tic-clip" | "timesfm" | "timm" | "tirex" | "torchgeo" | "transformers" | "transformers.js" | "trellis" | "ultralytics" | "univa" | "uni-3dar" | "unity-sentis" | "sana" | "videoprism" | "vfi-mamba" | "voicecraft" | "vui" | "wham" | "whisperkit" | "yolov10" | "zonos" | "3dtopia-xl")[];
 //# sourceMappingURL=model-libraries.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B
+
{"version":3,"file":"model-libraries.d.ts","sourceRoot":"","sources":["../../src/model-libraries.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,iBAAiB,CAAC;AACjD,OAAO,KAAK,EAAE,kBAAkB,EAAE,MAAM,gCAAgC,CAAC;AAEzE;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAChC;;;;OAIG;IACH,WAAW,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,SAAS,KAAK,MAAM,EAAE,CAAC;IAC1C;;;;;OAKG;IACH,cAAc,CAAC,EAAE,kBAAkB,CAAC;IACpC;;;OAGG;IACH,MAAM,CAAC,EAAE,OAAO,CAAC;CACjB;AAED;;;;;;;;;;;;;GAaG;AAEH,eAAO,MAAM,2BAA2B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA8lCI,CAAC;AAE7C,MAAM,MAAM,eAAe,GAAG,MAAM,OAAO,2BAA2B,CAAC;AAEvE,eAAO,MAAM,sBAAsB,EAA+C,eAAe,EAAE,CAAC;AAEpG,eAAO,MAAM,8BAA8B,0jEAQ1B,CAAC"}
@@ -618,6 +618,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 		filter: false,
 		countDownloads: `path:"tokenizer-e351c8d8-checkpoint125.safetensors"`,
 	},
+	mtvcraft: {
+		prettyLabel: "MTVCraft",
+		repoName: "MTVCraft",
+		repoUrl: "https://github.com/baaivision/MTVCraft",
+		filter: false,
+		countDownloads: `path:"vae/3d-vae.pt"`,
+	},
 	nemo: {
 		prettyLabel: "NeMo",
 		repoName: "NeMo",
@@ -666,6 +673,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 		filter: true,
 		countDownloads: `path:"model_config.json"`,
 	},
+	PaddleOCR: {
+		prettyLabel: "PaddleOCR",
+		repoName: "PaddleOCR",
+		repoUrl: "https://github.com/PaddlePaddle/PaddleOCR",
+		snippets: snippets.paddleocr,
+		filter: true,
+	},
 	peft: {
 		prettyLabel: "PEFT",
 		repoName: "PEFT",
@@ -740,6 +754,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 		filter: false,
 		countDownloads: `path:"model.safetensors"`,
 	},
+	renderformer: {
+		prettyLabel: "RenderFormer",
+		repoName: "RenderFormer",
+		repoUrl: "https://github.com/microsoft/renderformer",
+		snippets: snippets.renderformer,
+		filter: false,
+	},
 	reverb: {
 		prettyLabel: "Reverb",
 		repoName: "Reverb",
@@ -1042,6 +1063,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 		countDownloads: `path_extension:"pth"`,
 		snippets: snippets.sana,
 	},
+	videoprism: {
+		prettyLabel: "VideoPrism",
+		repoName: "VideoPrism",
+		repoUrl: "https://github.com/google-deepmind/videoprism",
+		countDownloads: `path_extension:"npz"`,
+		snippets: snippets.videoprism,
+	},
 	"vfi-mamba": {
 		prettyLabel: "VFIMamba",
 		repoName: "VFIMamba",
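The hunks above register four new libraries (mtvcraft, PaddleOCR, renderformer, videoprism) in the built `MODEL_LIBRARIES_UI_ELEMENTS` map. A minimal consumer-side sketch, assuming the map and the `ModelLibraryKey` type are re-exported from the package root as the declarations above suggest:

```ts
import { MODEL_LIBRARIES_UI_ELEMENTS, type ModelLibraryKey } from "@huggingface/tasks";

// The four new entries become ordinary keys of the map, and therefore
// valid ModelLibraryKey values.
const newKeys: ModelLibraryKey[] = ["mtvcraft", "PaddleOCR", "renderformer", "videoprism"];

for (const key of newKeys) {
	// prettyLabel and repoUrl exist on every entry, so this access is safe across the union.
	const { prettyLabel, repoUrl } = MODEL_LIBRARIES_UI_ELEMENTS[key];
	console.log(`${prettyLabel}: ${repoUrl}`);
}
```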
package/dist/esm/pipelines.d.ts CHANGED
@@ -105,14 +105,6 @@ export declare const PIPELINE_DATA: {
         }[];
         modality: "nlp";
     };
-    "text2text-generation": {
-        name: string;
-        subtasks: {
-            type: string;
-            name: string;
-        }[];
-        modality: "nlp";
-    };
     "fill-mask": {
         name: string;
         subtasks: {
@@ -381,5 +373,5 @@ export type PipelineType = keyof typeof PIPELINE_DATA;
 export type WidgetType = PipelineType | "conversational";
 export declare const PIPELINE_TYPES: PipelineType[];
 export declare const SUBTASK_TYPES: string[];
-
export declare const PIPELINE_TYPES_SET: Set<"other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "
+
export declare const PIPELINE_TYPES_SET: Set<"other" | "text-classification" | "token-classification" | "table-question-answering" | "question-answering" | "zero-shot-classification" | "translation" | "summarization" | "feature-extraction" | "text-generation" | "fill-mask" | "sentence-similarity" | "text-to-speech" | "text-to-audio" | "automatic-speech-recognition" | "audio-to-audio" | "audio-classification" | "audio-text-to-text" | "voice-activity-detection" | "depth-estimation" | "image-classification" | "object-detection" | "image-segmentation" | "text-to-image" | "image-to-text" | "image-to-image" | "image-to-video" | "unconditional-image-generation" | "video-classification" | "reinforcement-learning" | "robotics" | "tabular-classification" | "tabular-regression" | "tabular-to-text" | "table-to-text" | "multiple-choice" | "text-ranking" | "text-retrieval" | "time-series-forecasting" | "text-to-video" | "image-text-to-text" | "visual-question-answering" | "document-question-answering" | "zero-shot-image-classification" | "graph-ml" | "mask-generation" | "zero-shot-object-detection" | "text-to-3d" | "image-to-3d" | "image-feature-extraction" | "video-text-to-text" | "keypoint-detection" | "visual-document-retrieval" | "any-to-any" | "video-to-video">;
 //# sourceMappingURL=pipelines.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"pipelines.d.ts","sourceRoot":"","sources":["../../src/pipelines.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,UAAU,yEAA0E,CAAC;AAElG,MAAM,MAAM,QAAQ,GAAG,CAAC,OAAO,UAAU,CAAC,CAAC,MAAM,CAAC,CAAC;AAEnD,eAAO,MAAM,eAAe;;;;;;;;CAQQ,CAAC;AAErC;;;;;;GAMG;AACH,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACb;AAED;;;;;GAKG;AACH,MAAM,WAAW,YAAY;IAC5B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,QAAQ,EAAE,QAAQ,CAAC;IACnB;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB;AAcD,eAAO,MAAM,aAAa
+
{"version":3,"file":"pipelines.d.ts","sourceRoot":"","sources":["../../src/pipelines.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,UAAU,yEAA0E,CAAC;AAElG,MAAM,MAAM,QAAQ,GAAG,CAAC,OAAO,UAAU,CAAC,CAAC,MAAM,CAAC,CAAC;AAEnD,eAAO,MAAM,eAAe;;;;;;;;CAQQ,CAAC;AAErC;;;;;;GAMG;AACH,MAAM,WAAW,OAAO;IACvB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;CACb;AAED;;;;;GAKG;AACH,MAAM,WAAW,YAAY;IAC5B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,OAAO,EAAE,CAAC;IACrB,QAAQ,EAAE,QAAQ,CAAC;IACnB;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;CACzB;AAcD,eAAO,MAAM,aAAa;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgkBc,CAAC;AAEzC,MAAM,MAAM,YAAY,GAAG,MAAM,OAAO,aAAa,CAAC;AAEtD,MAAM,MAAM,UAAU,GAAG,YAAY,GAAG,gBAAgB,CAAC;AAEzD,eAAO,MAAM,cAAc,EAAiC,YAAY,EAAE,CAAC;AAE3E,eAAO,MAAM,aAAa,UAEN,CAAC;AAErB,eAAO,MAAM,kBAAkB,itCAA0B,CAAC"}
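With `text2text-generation` gone from the declared `PIPELINE_TYPES_SET`, an incoming tag has to be validated before it can be treated as a `PipelineType`. A sketch of that check, assuming both symbols are re-exported from the package root:

```ts
import { PIPELINE_TYPES_SET, type PipelineType } from "@huggingface/tasks";

// Narrow an arbitrary string to a PipelineType, returning undefined for tags
// that are no longer pipelines (e.g. "text2text-generation" as of 0.19.23).
function asPipelineType(tag: string): PipelineType | undefined {
	return PIPELINE_TYPES_SET.has(tag as PipelineType) ? (tag as PipelineType) : undefined;
}

console.log(asPipelineType("text-generation"));      // "text-generation"
console.log(asPipelineType("text2text-generation")); // undefined
```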
package/dist/esm/pipelines.js CHANGED
@@ -192,12 +192,6 @@ export const PIPELINE_DATA = {
 				type: "language-modeling",
 				name: "Language Modeling",
 			},
-		],
-		modality: "nlp",
-	},
-	"text2text-generation": {
-		name: "Text2Text Generation",
-		subtasks: [
 			{
 				type: "text-simplification",
 				name: "Text simplification",
@@ -226,6 +220,10 @@ export const PIPELINE_DATA = {
 				type: "closed-book-qa",
 				name: "Closed Book QA",
 			},
+			{
+				type: "text2text-generation",
+				name: "Text2Text Generation",
+			},
 		],
 		modality: "nlp",
 	},
@@ -1 +1 @@
-
{"version":3,"file":"inputs.d.ts","sourceRoot":"","sources":["../../../src/snippets/inputs.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AACpE,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;
+
{"version":3,"file":"inputs.d.ts","sourceRoot":"","sources":["../../../src/snippets/inputs.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AACpE,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAC;AAyJnD,wBAAgB,oBAAoB,CACnC,KAAK,EAAE,gBAAgB,EACvB,MAAM,UAAQ,EACd,QAAQ,UAAQ,GACd,MAAM,GAAG,0BAA0B,EAAE,CAmBvC"}
@@ -48,7 +48,6 @@ const inputsTextGeneration = (model) => {
 	}
 	return `"Can you please let us know more details about your "`;
 };
-const inputsText2TextGeneration = () => `"The answer to the universe is"`;
 const inputsFillMask = (model) => `"The answer to the universe is ${model.mask_token}."`;
 const inputsSentenceSimilarity = () => `{
 	"source_sentence": "That is a happy person",
@@ -106,7 +105,6 @@ const modelInputSnippets = {
 	"text-to-video": inputsTextToVideo,
 	"text-to-speech": inputsTextToSpeech,
 	"text-to-audio": inputsTextToAudio,
-	"text2text-generation": inputsText2TextGeneration,
 	"token-classification": inputsTokenClassification,
 	translation: inputsTranslation,
 	"zero-shot-classification": inputsZeroShotClassification,
@@ -1,7 +1,7 @@
 import type { PipelineType } from "../pipelines.js";
 export type * from "./audio-classification/inference.js";
 export type * from "./automatic-speech-recognition/inference.js";
-export type { ChatCompletionInput, ChatCompletionInputMessage, ChatCompletionOutput, ChatCompletionOutputComplete, ChatCompletionOutputMessage, ChatCompletionStreamOutput, ChatCompletionStreamOutputChoice, ChatCompletionStreamOutputDelta, } from "./chat-completion/inference.js";
+export type { ChatCompletionInput, ChatCompletionInputMessage, ChatCompletionInputMessageChunkType, ChatCompletionOutput, ChatCompletionOutputComplete, ChatCompletionOutputMessage, ChatCompletionStreamOutput, ChatCompletionStreamOutputChoice, ChatCompletionStreamOutputDelta, } from "./chat-completion/inference.js";
 export type * from "./document-question-answering/inference.js";
 export type * from "./feature-extraction/inference.js";
 export type * from "./fill-mask/inference.js";
@@ -1 +1 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/tasks/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAgDpD,mBAAmB,qCAAqC,CAAC;AACzD,mBAAmB,6CAA6C,CAAC;AACjE,YAAY,EACX,mBAAmB,EACnB,0BAA0B,EAC1B,oBAAoB,EACpB,4BAA4B,EAC5B,2BAA2B,EAC3B,0BAA0B,EAC1B,gCAAgC,EAChC,+BAA+B,GAC/B,MAAM,gCAAgC,CAAC;AACxC,mBAAmB,4CAA4C,CAAC;AAChE,mBAAmB,mCAAmC,CAAC;AACvD,mBAAmB,0BAA0B,CAAC;AAC9C,YAAY,EACX,wBAAwB,EACxB,yBAAyB,EACzB,gCAAgC,EAChC,6BAA6B,GAC7B,MAAM,qCAAqC,CAAC;AAC7C,mBAAmB,+BAA+B,CAAC;AACnD,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,8BAA8B,CAAC;AAC/G,mBAAmB,mCAAmC,CAAC;AACvD,YAAY,EAAE,iBAAiB,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,MAAM,+BAA+B,CAAC;AACnH,mBAAmB,iCAAiC,CAAC;AACrD,mBAAmB,iCAAiC,CAAC;AACrD,mBAAmB,mCAAmC,CAAC;AACvD,mBAAmB,oCAAoC,CAAC;AACxD,mBAAmB,8BAA8B,CAAC;AAClD,mBAAmB,yCAAyC,CAAC;AAC7D,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,8BAA8B,CAAC;AAC/G,YAAY,EAAE,qBAAqB,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,8BAA8B,CAAC;AAC/G,YAAY,EAAE,sBAAsB,EAAE,iBAAiB,EAAE,kBAAkB,EAAE,MAAM,+BAA+B,CAAC;AACnH,mBAAmB,qCAAqC,CAAC;AACzD,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,4BAA4B,CAAC;AACtF,YAAY,EACX,6BAA6B,EAC7B,uBAAuB,EACvB,wBAAwB,EACxB,+BAA+B,EAC/B,4BAA4B,GAC5B,MAAM,oCAAoC,CAAC;AAC5C,YAAY,EACX,gCAAgC,EAChC,gCAAgC,EAChC,mBAAmB,EACnB,oBAAoB,EACpB,2BAA2B,EAC3B,qCAAqC,EACrC,kCAAkC,EAClC,yBAAyB,EACzB,uCAAuC,EACvC,0BAA0B,GAC1B,MAAM,gCAAgC,CAAC;AACxC,mBAAmB,qCAAqC,CAAC;AACzD,mBAAmB,0CAA0C,CAAC;AAC9D,mBAAmB,yCAAyC,CAAC;AAC7D,mBAAmB,+CAA+C,CAAC;AACnE,YAAY,EACX,WAAW,EACX,4BAA4B,EAC5B,6BAA6B,EAC7B,oCAAoC,GACpC,MAAM,2CAA2C,CAAC;AAEnD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAC7D;;GAEG;AACH,eAAO,MAAM,qBAAqB,EAAE,MAAM,CAAC,YAAY,EAAE,eAAe,EAAE,
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/tasks/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAgDpD,mBAAmB,qCAAqC,CAAC;AACzD,mBAAmB,6CAA6C,CAAC;AACjE,YAAY,EACX,mBAAmB,EACnB,0BAA0B,EAC1B,mCAAmC,EACnC,oBAAoB,EACpB,4BAA4B,EAC5B,2BAA2B,EAC3B,0BAA0B,EAC1B,gCAAgC,EAChC,+BAA+B,GAC/B,MAAM,gCAAgC,CAAC;AACxC,mBAAmB,4CAA4C,CAAC;AAChE,mBAAmB,mCAAmC,CAAC;AACvD,mBAAmB,0BAA0B,CAAC;AAC9C,YAAY,EACX,wBAAwB,EACxB,yBAAyB,EACzB,gCAAgC,EAChC,6BAA6B,GAC7B,MAAM,qCAAqC,CAAC;AAC7C,mBAAmB,+BAA+B,CAAC;AACnD,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,8BAA8B,CAAC;AAC/G,mBAAmB,mCAAmC,CAAC;AACvD,YAAY,EAAE,iBAAiB,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,MAAM,+BAA+B,CAAC;AACnH,mBAAmB,iCAAiC,CAAC;AACrD,mBAAmB,iCAAiC,CAAC;AACrD,mBAAmB,mCAAmC,CAAC;AACvD,mBAAmB,oCAAoC,CAAC;AACxD,mBAAmB,8BAA8B,CAAC;AAClD,mBAAmB,yCAAyC,CAAC;AAC7D,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,qBAAqB,EAAE,MAAM,8BAA8B,CAAC;AAC/G,YAAY,EAAE,qBAAqB,EAAE,iBAAiB,EAAE,gBAAgB,EAAE,MAAM,8BAA8B,CAAC;AAC/G,YAAY,EAAE,sBAAsB,EAAE,iBAAiB,EAAE,kBAAkB,EAAE,MAAM,+BAA+B,CAAC;AACnH,mBAAmB,qCAAqC,CAAC;AACzD,YAAY,EAAE,gBAAgB,EAAE,iBAAiB,EAAE,MAAM,4BAA4B,CAAC;AACtF,YAAY,EACX,6BAA6B,EAC7B,uBAAuB,EACvB,wBAAwB,EACxB,+BAA+B,EAC/B,4BAA4B,GAC5B,MAAM,oCAAoC,CAAC;AAC5C,YAAY,EACX,gCAAgC,EAChC,gCAAgC,EAChC,mBAAmB,EACnB,oBAAoB,EACpB,2BAA2B,EAC3B,qCAAqC,EACrC,kCAAkC,EAClC,yBAAyB,EACzB,uCAAuC,EACvC,0BAA0B,GAC1B,MAAM,gCAAgC,CAAC;AACxC,mBAAmB,qCAAqC,CAAC;AACzD,mBAAmB,0CAA0C,CAAC;AAC9D,mBAAmB,yCAAyC,CAAC;AAC7D,mBAAmB,+CAA+C,CAAC;AACnE,YAAY,EACX,WAAW,EACX,4BAA4B,EAC5B,6BAA6B,EAC7B,oCAAoC,GACpC,MAAM,2CAA2C,CAAC;AAEnD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAC7D;;GAEG;AACH,eAAO,MAAM,qBAAqB,EAAE,MAAM,CAAC,YAAY,EAAE,eAAe,EAAE,CAgEzE,CAAC;AAoBF,eAAO,MAAM,UAAU,EAAE,MAAM,CAAC,YAAY,EAAE,QAAQ,GAAG,SAAS,CAwDxD,CAAC;AAEX,MAAM,WAAW,WAAW;IAC3B,WAAW,EAAE,MAAM,CAAC;IACpB,EAAE,EAAE,MAAM,CAAC;CACX;AAED,MAAM,MAAM,aAAa,GACtB;IACA,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,OAAO,CAAC;CACb,GACD;IACA,IAAI,EAAE,KAAK,CAAC;QACX,KAAK,EAAE,MAAM,CAAC;QACd,KAAK,EAAE,MAAM,CAAC;KACd,CAAC,CAAC;IACH,IAAI,EAAE,OAAO,CAAC;CACb,GACD;IACA,QAAQ,EAAE,MAAM,CAAC;IACjB,IAAI,EAAE,KAAK,CAAC;CACX,GACD;IACA,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC;IAClB,IAAI,EAAE,SAAS,CAAC;CACf,GACD;IACA,OAAO,EAAE,MAAM,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;CACZ,GACD;IACA,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,KAAK,CAAC;QACb,GAAG,EAAE,MAAM,CAAC;QACZ,KAAK,EAAE,MAAM,CAAC;QACd,IAAI,EAAE,MAAM,CAAC;KACb,CAAC,CAAC;IACH,IAAI,EAAE,kBAAkB,CAAC;CACxB,CAAC;AAEL,MAAM,WAAW,QAAQ;IACxB,MAAM,EAAE,aAAa,EAAE,CAAC;IACxB,OAAO,EAAE,aAAa,EAAE,CAAC;CACzB;AAED,MAAM,WAAW,QAAQ;IACxB,QAAQ,EAAE,WAAW,EAAE,CAAC;IACxB,IAAI,EAAE,QAAQ,CAAC;IACf,EAAE,EAAE,YAAY,CAAC;IACjB,WAAW,CAAC,EAAE,YAAY,CAAC;IAC3B,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB,KAAK,EAAE,MAAM,CAAC;IACd,SAAS,EAAE,eAAe,EAAE,CAAC;IAC7B,OAAO,EAAE,WAAW,EAAE,CAAC;IACvB,MAAM,EAAE,WAAW,EAAE,CAAC;IACtB,MAAM,EAAE,WAAW,EAAE,CAAC;IACtB,OAAO,EAAE,MAAM,CAAC;IAChB,YAAY,EAAE,MAAM,EAAE,CAAC;IACvB,SAAS,CAAC,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,MAAM,cAAc,GAAG,IAAI,CAAC,QAAQ,EAAE,IAAI,GAAG,OAAO,GAAG,WAAW,CAAC,CAAC"}
package/dist/esm/tasks/index.js CHANGED
@@ -87,7 +87,6 @@ export const TASKS_MODEL_LIBRARIES = {
 	"text-to-speech": ["espnet", "tensorflowtts", "transformers", "transformers.js"],
 	"text-to-audio": ["transformers", "transformers.js"],
 	"text-to-video": ["diffusers"],
-	"text2text-generation": ["transformers", "transformers.js"],
 	"time-series-forecasting": [],
 	"token-classification": [
 		"adapter-transformers",
@@ -172,7 +171,6 @@ export const TASKS_DATA = {
 	"text-to-speech": getData("text-to-speech", textToSpeech),
 	"text-to-audio": undefined,
 	"text-to-video": getData("text-to-video", textToVideo),
-	"text2text-generation": undefined,
 	"time-series-forecasting": undefined,
 	"token-classification": getData("token-classification", tokenClassification),
 	translation: getData("translation", translation),
@@ -12,7 +12,7 @@ const taskData = {
 	widgetModels: [],
 	youtubeId: undefined,
 	/// If this is a subtask, link to the most general task ID
-	/// (eg,
+	/// (eg, text-generation is the canonical ID of text-simplification)
 	canonicalId: undefined,
 };
 export default taskData;
package/package.json CHANGED
package/src/library-to-tasks.ts CHANGED
@@ -35,13 +35,7 @@ export const LIBRARY_TASK_MAPPING: Partial<Record<ModelLibraryKey, PipelineType[
 	sklearn: ["tabular-classification", "tabular-regression", "text-classification"],
 	spacy: ["token-classification", "text-classification", "sentence-similarity"],
 	"span-marker": ["token-classification"],
-	speechbrain: [
-		"audio-classification",
-		"audio-to-audio",
-		"automatic-speech-recognition",
-		"text-to-speech",
-		"text2text-generation",
-	],
+	speechbrain: ["audio-classification", "audio-to-audio", "automatic-speech-recognition", "text-to-speech"],
 	stanza: ["token-classification"],
 	timm: ["image-classification", "image-feature-extraction"],
 	transformers: [
@@ -62,7 +56,6 @@ export const LIBRARY_TASK_MAPPING: Partial<Record<ModelLibraryKey, PipelineType[
 		"question-answering",
 		"summarization",
 		"table-question-answering",
-		"text2text-generation",
 		"text-classification",
 		"text-generation",
 		"text-to-audio",
@@ -961,6 +961,17 @@ export const paddlenlp = (model: ModelData): string[] => {
 	}
 };

+export const paddleocr = (model: ModelData): string[] => [
+	`# pip install paddleocr
+from paddleocr import TextDetection
+model = TextDetection(model_name="${model.id}")
+output = model.predict(input="path/to/image.png", batch_size=1)
+for res in output:
+    res.print()
+    res.save_to_img(save_path="./output/")
+    res.save_to_json(save_path="./output/res.json")`,
+];
+
 export const perception_encoder = (model: ModelData): string[] => {
 	const clip_model = `# Use PE-Core models as CLIP models
 import core.vision_encoder.pe as pe
@@ -1035,6 +1046,13 @@ export const relik = (model: ModelData): string[] => [
 relik = Relik.from_pretrained("${model.id}")`,
 ];

+export const renderformer = (model: ModelData): string[] => [
+	`# Install from https://github.com/microsoft/renderformer
+
+from renderformer import RenderFormerRenderingPipeline
+pipeline = RenderFormerRenderingPipeline.from_pretrained("${model.id}")`,
+];
+
 const tensorflowttsTextToMel = (model: ModelData): string[] => [
 	`from tensorflow_tts.inference import AutoProcessor, TFAutoModel

@@ -1535,6 +1553,20 @@ image = sana(
 ) `,
 ];

+export const videoprism = (model: ModelData): string[] => [
+	`# Install from https://github.com/google-deepmind/videoprism
+import jax
+import jax.numpy as jnp
+from videoprism import models as vp
+
+flax_model = vp.MODELS["${model.id}"]()
+loaded_state = vp.load_pretrained_weights("${model.id}")
+
+@jax.jit
+def forward_fn(inputs, train=False):
+    return flax_model.apply(loaded_state, inputs, train=train)`,
+];
+
 export const vfimamba = (model: ModelData): string[] => [
 	`from Trainer_finetune import Model

@@ -1797,9 +1829,9 @@ wav = model.generate(descriptions) # generates 3 samples.`,
 ];
 export const anemoi = (model: ModelData): string[] => [
 	`from anemoi.inference.runners.default import DefaultRunner
-from anemoi.inference.config import
+from anemoi.inference.config.run import RunConfiguration
 # Create Configuration
-config =
+config = RunConfiguration(checkpoint = {"huggingface":"${model.id}"})
 # Load Runner
 runner = DefaultRunner(config)`,
 ];
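The new `paddleocr`, `renderformer`, and `videoprism` snippet builders each take a `ModelData` object and return Python usage strings. A sketch of calling one through its library entry; the model id is a placeholder, and `ModelData` is assumed to be exported from the package root:

```ts
import { MODEL_LIBRARIES_UI_ELEMENTS, type ModelData } from "@huggingface/tasks";

// Hypothetical model metadata — only the id is read by the paddleocr builder above.
const model = { id: "some-org/some-paddleocr-model" } as ModelData;

const entry = MODEL_LIBRARIES_UI_ELEMENTS.PaddleOCR;
const [pythonSnippet] = entry.snippets(model);
console.log(pythonSnippet); // "# pip install paddleocr\nfrom paddleocr import TextDetection\n..."
```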
package/src/model-libraries.ts CHANGED
@@ -662,6 +662,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 		filter: false,
 		countDownloads: `path:"tokenizer-e351c8d8-checkpoint125.safetensors"`,
 	},
+	mtvcraft: {
+		prettyLabel: "MTVCraft",
+		repoName: "MTVCraft",
+		repoUrl: "https://github.com/baaivision/MTVCraft",
+		filter: false,
+		countDownloads: `path:"vae/3d-vae.pt"`,
+	},
 	nemo: {
 		prettyLabel: "NeMo",
 		repoName: "NeMo",
@@ -710,6 +717,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 		filter: true,
 		countDownloads: `path:"model_config.json"`,
 	},
+	PaddleOCR: {
+		prettyLabel: "PaddleOCR",
+		repoName: "PaddleOCR",
+		repoUrl: "https://github.com/PaddlePaddle/PaddleOCR",
+		snippets: snippets.paddleocr,
+		filter: true,
+	},
 	peft: {
 		prettyLabel: "PEFT",
 		repoName: "PEFT",
@@ -784,6 +798,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 		filter: false,
 		countDownloads: `path:"model.safetensors"`,
 	},
+	renderformer: {
+		prettyLabel: "RenderFormer",
+		repoName: "RenderFormer",
+		repoUrl: "https://github.com/microsoft/renderformer",
+		snippets: snippets.renderformer,
+		filter: false,
+	},
 	reverb: {
 		prettyLabel: "Reverb",
 		repoName: "Reverb",
@@ -1086,6 +1107,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
 		countDownloads: `path_extension:"pth"`,
 		snippets: snippets.sana,
 	},
+	videoprism: {
+		prettyLabel: "VideoPrism",
+		repoName: "VideoPrism",
+		repoUrl: "https://github.com/google-deepmind/videoprism",
+		countDownloads: `path_extension:"npz"`,
+		snippets: snippets.videoprism,
+	},
 	"vfi-mamba": {
 		prettyLabel: "VFIMamba",
 		repoName: "VFIMamba",
package/src/pipelines.ts CHANGED
@@ -237,12 +237,6 @@ export const PIPELINE_DATA = {
 				type: "language-modeling",
 				name: "Language Modeling",
 			},
-		],
-		modality: "nlp",
-	},
-	"text2text-generation": {
-		name: "Text2Text Generation",
-		subtasks: [
 			{
 				type: "text-simplification",
 				name: "Text simplification",
@@ -271,6 +265,10 @@ export const PIPELINE_DATA = {
 				type: "closed-book-qa",
 				name: "Closed Book QA",
 			},
+			{
+				type: "text2text-generation",
+				name: "Text2Text Generation",
+			},
 		],
 		modality: "nlp",
 	},
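The source change mirrors the dist output above: `text2text-generation` is dropped as a top-level pipeline and re-added as a subtask of `text-generation`. A sketch of where it now lives, assuming `PIPELINE_DATA` is re-exported from the package root:

```ts
import { PIPELINE_DATA } from "@huggingface/tasks";

// The tag is no longer a key of PIPELINE_DATA; it now appears among the
// "text-generation" subtasks next to "text-simplification", "closed-book-qa", etc.
const found = PIPELINE_DATA["text-generation"].subtasks?.some(
	(subtask) => subtask.type === "text2text-generation"
);
console.log(found); // true
```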
package/src/snippets/inputs.ts CHANGED
@@ -66,8 +66,6 @@ const inputsTextGeneration = (model: ModelDataMinimal): string | ChatCompletionI
 	return `"Can you please let us know more details about your "`;
 };

-const inputsText2TextGeneration = () => `"The answer to the universe is"`;
-
 const inputsFillMask = (model: ModelDataMinimal) => `"The answer to the universe is ${model.mask_token}."`;

 const inputsSentenceSimilarity = () =>
@@ -147,7 +145,6 @@ const modelInputSnippets: {
 	"text-to-video": inputsTextToVideo,
 	"text-to-speech": inputsTextToSpeech,
 	"text-to-audio": inputsTextToAudio,
-	"text2text-generation": inputsText2TextGeneration,
 	"token-classification": inputsTokenClassification,
 	translation: inputsTranslation,
 	"zero-shot-classification": inputsZeroShotClassification,
package/src/tasks/index.ts CHANGED
@@ -51,6 +51,7 @@ export type * from "./automatic-speech-recognition/inference.js";
 export type {
 	ChatCompletionInput,
 	ChatCompletionInputMessage,
+	ChatCompletionInputMessageChunkType,
 	ChatCompletionOutput,
 	ChatCompletionOutputComplete,
 	ChatCompletionOutputMessage,
@@ -157,7 +158,6 @@ export const TASKS_MODEL_LIBRARIES: Record<PipelineType, ModelLibraryKey[]> = {
 	"text-to-speech": ["espnet", "tensorflowtts", "transformers", "transformers.js"],
 	"text-to-audio": ["transformers", "transformers.js"],
 	"text-to-video": ["diffusers"],
-	"text2text-generation": ["transformers", "transformers.js"],
 	"time-series-forecasting": [],
 	"token-classification": [
 		"adapter-transformers",
@@ -244,7 +244,6 @@ export const TASKS_DATA: Record<PipelineType, TaskData | undefined> = {
 	"text-to-speech": getData("text-to-speech", textToSpeech),
 	"text-to-audio": undefined,
 	"text-to-video": getData("text-to-video", textToVideo),
-	"text2text-generation": undefined,
 	"time-series-forecasting": undefined,
 	"token-classification": getData("token-classification", tokenClassification),
 	translation: getData("translation", translation),
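Besides dropping the `text2text-generation` entries from `TASKS_MODEL_LIBRARIES` and `TASKS_DATA`, the hunk above re-exports `ChatCompletionInputMessageChunkType` from the task index. A small consumer sketch; the members of the union are an assumption here, since they live in chat-completion/inference.ts, which is not part of this diff:

```ts
import type { ChatCompletionInputMessageChunkType } from "@huggingface/tasks";

// Hypothetical guard around the newly re-exported union type.
// "text" and "image_url" are assumed members; check the chat-completion spec.
function isChunkType(value: string): value is ChatCompletionInputMessageChunkType {
	return value === "text" || value === "image_url";
}

console.log(isChunkType("image_url")); // true under the assumption above
```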
@@ -14,7 +14,7 @@ const taskData: TaskDataCustom = {
 	widgetModels: [],
 	youtubeId: undefined,
 	/// If this is a subtask, link to the most general task ID
-	/// (eg,
+	/// (eg, text-generation is the canonical ID of text-simplification)
 	canonicalId: undefined,
 };

@@ -1,4 +1,4 @@
-This task covers guides on both [text-generation](https://huggingface.co/models?pipeline_tag=text-generation&sort=downloads) and [text-to-text generation](https://huggingface.co/models?
+This task covers guides on both [text-generation](https://huggingface.co/models?pipeline_tag=text-generation&sort=downloads) and [text-to-text generation](https://huggingface.co/models?other=text2text-generation&sort=downloads) models. Popular large language models that are used for chats or following instructions are also covered in this task. You can find the list of selected open-source large language models [here](https://huggingface.co/spaces/open-llm-leaderboard/open_llm_leaderboard), ranked by their performance scores.

 ## Use Cases

@@ -58,7 +58,7 @@ generator("Hello, I'm a language model", max_length = 30, num_return_sequences=3
 ## {'generated_text': "Hello, I'm a language modeler. I write and maintain software in Python. I love to code, and that includes coding things that require writing"}, ...
 ```

-[Text-to-Text generation models](https://huggingface.co/models?
+[Text-to-Text generation models](https://huggingface.co/models?other=text2text-generation&sort=downloads) have a separate pipeline called `text2text-generation`. This pipeline takes an input containing the sentence including the task and returns the output of the accomplished task.

 ```python
 from transformers import pipeline
@@ -1,7 +1,7 @@
 import type { TaskDataCustom } from "../index.js";

 const taskData: TaskDataCustom = {
-	canonicalId: "
+	canonicalId: "text-generation",
 	datasets: [
 		{
 			description: "A dataset of copyright-free books translated into 16 different languages.",
@@ -1,52 +0,0 @@
-/**
- * Inference code generated from the JSON schema spec in ./spec
- *
- * Using src/scripts/inference-codegen
- */
-/**
- * Inputs for Text2text Generation inference
- */
-export interface Text2TextGenerationInput {
-    /**
-     * The input text data
-     */
-    inputs: string;
-    /**
-     * Additional inference parameters for Text2text Generation
-     */
-    parameters?: Text2TextGenerationParameters;
-    [property: string]: unknown;
-}
-/**
- * Additional inference parameters for Text2text Generation
- */
-export interface Text2TextGenerationParameters {
-    /**
-     * Whether to clean up the potential extra spaces in the text output.
-     */
-    clean_up_tokenization_spaces?: boolean;
-    /**
-     * Additional parametrization of the text generation algorithm
-     */
-    generate_parameters?: {
-        [key: string]: unknown;
-    };
-    /**
-     * The truncation strategy to use
-     */
-    truncation?: Text2TextGenerationTruncationStrategy;
-    [property: string]: unknown;
-}
-export type Text2TextGenerationTruncationStrategy = "do_not_truncate" | "longest_first" | "only_first" | "only_second";
-/**
- * Outputs of inference for the Text2text Generation task
- */
-export interface Text2TextGenerationOutput {
-    generatedText: unknown;
-    /**
-     * The generated text.
-     */
-    generated_text?: string;
-    [property: string]: unknown;
-}
-//# sourceMappingURL=inference.d.ts.map
@@ -1 +0,0 @@
-
{"version":3,"file":"inference.d.ts","sourceRoot":"","sources":["../../../../src/tasks/text2text-generation/inference.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AACH;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACxC;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,6BAA6B,CAAC;IAC3C,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD;;GAEG;AACH,MAAM,WAAW,6BAA6B;IAC7C;;OAEG;IACH,4BAA4B,CAAC,EAAE,OAAO,CAAC;IACvC;;OAEG;IACH,mBAAmB,CAAC,EAAE;QACrB,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC;KACvB,CAAC;IACF;;OAEG;IACH,UAAU,CAAC,EAAE,qCAAqC,CAAC;IACnD,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B;AACD,MAAM,MAAM,qCAAqC,GAAG,iBAAiB,GAAG,eAAe,GAAG,YAAY,GAAG,aAAa,CAAC;AACvH;;GAEG;AACH,MAAM,WAAW,yBAAyB;IACzC,aAAa,EAAE,OAAO,CAAC;IACvB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC;CAC5B"}
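The removed `Text2TextGeneration*` types above no longer exist in 0.19.23, so downstream imports of them will fail to compile. One possible migration sketch, assuming the `text-generation` task types (which are not part of this diff) remain exported:

```ts
// No longer available after this release:
// import type { Text2TextGenerationInput, Text2TextGenerationOutput } from "@huggingface/tasks";

// Assumed replacement from the text-generation task family:
import type { TextGenerationInput } from "@huggingface/tasks";

const input: TextGenerationInput = { inputs: "Translate to German: Hello, world!" };
console.log(input.inputs);
```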