@huggingface/tasks 0.1.3 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.ts CHANGED
@@ -37,12 +37,13 @@ declare enum ModelLibrary {
  "stanza" = "Stanza",
  "fasttext" = "fastText",
  "stable-baselines3" = "Stable-Baselines3",
- "ml-agents" = "ML-Agents",
+ "ml-agents" = "Unity ML-Agents",
  "pythae" = "Pythae",
- "mindspore" = "MindSpore"
+ "mindspore" = "MindSpore",
+ "unity-sentis" = "Unity Sentis"
  }
  type ModelLibraryKey = keyof typeof ModelLibrary;
- declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: string[];
+ declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("speechbrain" | "fastai" | "adapter-transformers" | "allennlp" | "asteroid" | "bertopic" | "diffusers" | "doctr" | "espnet" | "fairseq" | "flair" | "keras" | "k2" | "mlx" | "nemo" | "open_clip" | "paddlenlp" | "peft" | "pyannote-audio" | "sample-factory" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "tensorflowtts" | "timm" | "transformers" | "transformers.js" | "stanza" | "fasttext" | "stable-baselines3" | "ml-agents" | "pythae" | "mindspore" | "unity-sentis")[];
 
  declare const MODALITIES: readonly ["cv", "nlp", "audio", "tabular", "multimodal", "rl", "other"];
  type Modality = (typeof MODALITIES)[number];
@@ -742,11 +743,6 @@ interface TaskData {
  }
  type TaskDataCustom = Omit<TaskData, "id" | "label" | "libraries">;
 
- declare const TAG_NFAA_CONTENT = "not-for-all-audiences";
- declare const OTHER_TAGS_SUGGESTIONS: string[];
- declare const TAG_TEXT_GENERATION_INFERENCE = "text-generation-inference";
- declare const TAG_CUSTOM_CODE = "custom_code";
-
  declare function getModelInputSnippet(model: ModelData, noWrap?: boolean, noQuotes?: boolean): string;
 
  declare const inputs_getModelInputSnippet: typeof getModelInputSnippet;
@@ -854,4 +850,4 @@ declare namespace index {
  };
  }
 
- export { ALL_DISPLAY_MODEL_LIBRARY_KEYS, ExampleRepo, InferenceDisplayability, LIBRARY_TASK_MAPPING_EXCLUDING_TRANSFORMERS, LibraryUiElement, MAPPING_DEFAULT_WIDGET, MODALITIES, MODALITY_LABELS, MODEL_LIBRARIES_UI_ELEMENTS, Modality, ModelData, ModelLibrary, ModelLibraryKey, OTHER_TAGS_SUGGESTIONS, PIPELINE_DATA, PIPELINE_TYPES, PIPELINE_TYPES_SET, PipelineData, PipelineType, SUBTASK_TYPES, TAG_CUSTOM_CODE, TAG_NFAA_CONTENT, TAG_TEXT_GENERATION_INFERENCE, TASKS_DATA, TASKS_MODEL_LIBRARIES, TaskData, TaskDataCustom, TaskDemo, TaskDemoEntry, TransformersInfo, WidgetExample, WidgetExampleAssetAndPromptInput, WidgetExampleAssetAndTextInput, WidgetExampleAssetAndZeroShotInput, WidgetExampleAssetInput, WidgetExampleAttribute, WidgetExampleOutput, WidgetExampleOutputAnswerScore, WidgetExampleOutputLabels, WidgetExampleOutputText, WidgetExampleOutputUrl, WidgetExampleSentenceSimilarityInput, WidgetExampleStructuredDataInput, WidgetExampleTableDataInput, WidgetExampleTextAndContextInput, WidgetExampleTextAndTableInput, WidgetExampleTextInput, WidgetExampleZeroShotTextInput, index as snippets };
+ export { ALL_DISPLAY_MODEL_LIBRARY_KEYS, ExampleRepo, InferenceDisplayability, LIBRARY_TASK_MAPPING_EXCLUDING_TRANSFORMERS, LibraryUiElement, MAPPING_DEFAULT_WIDGET, MODALITIES, MODALITY_LABELS, MODEL_LIBRARIES_UI_ELEMENTS, Modality, ModelData, ModelLibrary, ModelLibraryKey, PIPELINE_DATA, PIPELINE_TYPES, PIPELINE_TYPES_SET, PipelineData, PipelineType, SUBTASK_TYPES, TASKS_DATA, TASKS_MODEL_LIBRARIES, TaskData, TaskDataCustom, TaskDemo, TaskDemoEntry, TransformersInfo, WidgetExample, WidgetExampleAssetAndPromptInput, WidgetExampleAssetAndTextInput, WidgetExampleAssetAndZeroShotInput, WidgetExampleAssetInput, WidgetExampleAttribute, WidgetExampleOutput, WidgetExampleOutputAnswerScore, WidgetExampleOutputLabels, WidgetExampleOutputText, WidgetExampleOutputUrl, WidgetExampleSentenceSimilarityInput, WidgetExampleStructuredDataInput, WidgetExampleTableDataInput, WidgetExampleTextAndContextInput, WidgetExampleTextAndTableInput, WidgetExampleTextInput, WidgetExampleZeroShotTextInput, index as snippets };
package/dist/index.js CHANGED
@@ -28,14 +28,10 @@ __export(src_exports, {
  MODALITY_LABELS: () => MODALITY_LABELS,
  MODEL_LIBRARIES_UI_ELEMENTS: () => MODEL_LIBRARIES_UI_ELEMENTS,
  ModelLibrary: () => ModelLibrary,
- OTHER_TAGS_SUGGESTIONS: () => OTHER_TAGS_SUGGESTIONS,
  PIPELINE_DATA: () => PIPELINE_DATA,
  PIPELINE_TYPES: () => PIPELINE_TYPES,
  PIPELINE_TYPES_SET: () => PIPELINE_TYPES_SET,
  SUBTASK_TYPES: () => SUBTASK_TYPES,
- TAG_CUSTOM_CODE: () => TAG_CUSTOM_CODE,
- TAG_NFAA_CONTENT: () => TAG_NFAA_CONTENT,
- TAG_TEXT_GENERATION_INFERENCE: () => TAG_TEXT_GENERATION_INFERENCE,
  TASKS_DATA: () => TASKS_DATA,
  TASKS_MODEL_LIBRARIES: () => TASKS_MODEL_LIBRARIES,
  snippets: () => snippets_exports
@@ -531,6 +527,13 @@ transcriptions = asr_model.transcribe(["file.wav"])`
  }
  };
  var mlAgents = (model) => [`mlagents-load-from-hf --repo-id="${model.id}" --local-dir="./downloads"`];
+ var sentis = (model) => [
+ `string modelName = "[Your model name here].sentis";
+ Model model = ModelLoader.Load(Application.streamingAssetsPath + "/" + modelName);
+ IWorker engine = WorkerFactory.CreateWorker(BackendType.GPUCompute, model);
+ // Please see provided C# file for more details
+ `
+ ];
  var mlx = (model) => [
  `pip install huggingface_hub hf_transfer
 
@@ -753,10 +756,16 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  "ml-agents": {
  btnLabel: "ml-agents",
  repoName: "ml-agents",
- repoUrl: "https://github.com/huggingface/ml-agents",
+ repoUrl: "https://github.com/Unity-Technologies/ml-agents",
  docsUrl: "https://huggingface.co/docs/hub/ml-agents",
  snippets: mlAgents
  },
+ "unity-sentis": {
+ btnLabel: "unity-sentis",
+ repoName: "unity-sentis",
+ repoUrl: "https://github.com/Unity-Technologies/sentis-samples",
+ snippets: sentis
+ },
  pythae: {
  btnLabel: "pythae",
  repoName: "pythae",
@@ -4622,12 +4631,14 @@ var ModelLibrary = /* @__PURE__ */ ((ModelLibrary2) => {
  ModelLibrary2["stanza"] = "Stanza";
  ModelLibrary2["fasttext"] = "fastText";
  ModelLibrary2["stable-baselines3"] = "Stable-Baselines3";
- ModelLibrary2["ml-agents"] = "ML-Agents";
+ ModelLibrary2["ml-agents"] = "Unity ML-Agents";
  ModelLibrary2["pythae"] = "Pythae";
  ModelLibrary2["mindspore"] = "MindSpore";
+ ModelLibrary2["unity-sentis"] = "Unity Sentis";
  return ModelLibrary2;
  })(ModelLibrary || {});
- var ALL_DISPLAY_MODEL_LIBRARY_KEYS = Object.keys(ModelLibrary).filter(
+ var ALL_MODEL_LIBRARY_KEYS = Object.keys(ModelLibrary);
+ var ALL_DISPLAY_MODEL_LIBRARY_KEYS = ALL_MODEL_LIBRARY_KEYS.filter(
  (k) => !["doctr", "k2", "mindspore", "tensorflowtts"].includes(k)
  );
 
@@ -4642,23 +4653,6 @@ var InferenceDisplayability = /* @__PURE__ */ ((InferenceDisplayability2) => {
  return InferenceDisplayability2;
  })(InferenceDisplayability || {});
 
- // src/tags.ts
- var TAG_NFAA_CONTENT = "not-for-all-audiences";
- var OTHER_TAGS_SUGGESTIONS = [
- "chemistry",
- "biology",
- "finance",
- "legal",
- "music",
- "art",
- "code",
- "climate",
- "medical",
- TAG_NFAA_CONTENT
- ];
- var TAG_TEXT_GENERATION_INFERENCE = "text-generation-inference";
- var TAG_CUSTOM_CODE = "custom_code";
-
  // src/snippets/index.ts
  var snippets_exports = {};
  __export(snippets_exports, {
@@ -5130,14 +5124,10 @@ function hasJsInferenceSnippet(model) {
  MODALITY_LABELS,
  MODEL_LIBRARIES_UI_ELEMENTS,
  ModelLibrary,
- OTHER_TAGS_SUGGESTIONS,
  PIPELINE_DATA,
  PIPELINE_TYPES,
  PIPELINE_TYPES_SET,
  SUBTASK_TYPES,
- TAG_CUSTOM_CODE,
- TAG_NFAA_CONTENT,
- TAG_TEXT_GENERATION_INFERENCE,
  TASKS_DATA,
  TASKS_MODEL_LIBRARIES,
  snippets
package/dist/index.mjs CHANGED
@@ -493,6 +493,13 @@ transcriptions = asr_model.transcribe(["file.wav"])`
  }
  };
  var mlAgents = (model) => [`mlagents-load-from-hf --repo-id="${model.id}" --local-dir="./downloads"`];
+ var sentis = (model) => [
+ `string modelName = "[Your model name here].sentis";
+ Model model = ModelLoader.Load(Application.streamingAssetsPath + "/" + modelName);
+ IWorker engine = WorkerFactory.CreateWorker(BackendType.GPUCompute, model);
+ // Please see provided C# file for more details
+ `
+ ];
  var mlx = (model) => [
  `pip install huggingface_hub hf_transfer
 
@@ -715,10 +722,16 @@ var MODEL_LIBRARIES_UI_ELEMENTS = {
  "ml-agents": {
  btnLabel: "ml-agents",
  repoName: "ml-agents",
- repoUrl: "https://github.com/huggingface/ml-agents",
+ repoUrl: "https://github.com/Unity-Technologies/ml-agents",
  docsUrl: "https://huggingface.co/docs/hub/ml-agents",
  snippets: mlAgents
  },
+ "unity-sentis": {
+ btnLabel: "unity-sentis",
+ repoName: "unity-sentis",
+ repoUrl: "https://github.com/Unity-Technologies/sentis-samples",
+ snippets: sentis
+ },
  pythae: {
  btnLabel: "pythae",
  repoName: "pythae",
@@ -4584,12 +4597,14 @@ var ModelLibrary = /* @__PURE__ */ ((ModelLibrary2) => {
  ModelLibrary2["stanza"] = "Stanza";
  ModelLibrary2["fasttext"] = "fastText";
  ModelLibrary2["stable-baselines3"] = "Stable-Baselines3";
- ModelLibrary2["ml-agents"] = "ML-Agents";
+ ModelLibrary2["ml-agents"] = "Unity ML-Agents";
  ModelLibrary2["pythae"] = "Pythae";
  ModelLibrary2["mindspore"] = "MindSpore";
+ ModelLibrary2["unity-sentis"] = "Unity Sentis";
  return ModelLibrary2;
  })(ModelLibrary || {});
- var ALL_DISPLAY_MODEL_LIBRARY_KEYS = Object.keys(ModelLibrary).filter(
+ var ALL_MODEL_LIBRARY_KEYS = Object.keys(ModelLibrary);
+ var ALL_DISPLAY_MODEL_LIBRARY_KEYS = ALL_MODEL_LIBRARY_KEYS.filter(
  (k) => !["doctr", "k2", "mindspore", "tensorflowtts"].includes(k)
  );
 
@@ -4604,23 +4619,6 @@ var InferenceDisplayability = /* @__PURE__ */ ((InferenceDisplayability2) => {
  return InferenceDisplayability2;
  })(InferenceDisplayability || {});
 
- // src/tags.ts
- var TAG_NFAA_CONTENT = "not-for-all-audiences";
- var OTHER_TAGS_SUGGESTIONS = [
- "chemistry",
- "biology",
- "finance",
- "legal",
- "music",
- "art",
- "code",
- "climate",
- "medical",
- TAG_NFAA_CONTENT
- ];
- var TAG_TEXT_GENERATION_INFERENCE = "text-generation-inference";
- var TAG_CUSTOM_CODE = "custom_code";
-
  // src/snippets/index.ts
  var snippets_exports = {};
  __export(snippets_exports, {
@@ -5091,14 +5089,10 @@ export {
  MODALITY_LABELS,
  MODEL_LIBRARIES_UI_ELEMENTS,
  ModelLibrary,
- OTHER_TAGS_SUGGESTIONS,
  PIPELINE_DATA,
  PIPELINE_TYPES,
  PIPELINE_TYPES_SET,
  SUBTASK_TYPES,
- TAG_CUSTOM_CODE,
- TAG_NFAA_CONTENT,
- TAG_TEXT_GENERATION_INFERENCE,
  TASKS_DATA,
  TASKS_MODEL_LIBRARIES,
  snippets_exports as snippets
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@huggingface/tasks",
  "packageManager": "pnpm@8.10.5",
- "version": "0.1.3",
+ "version": "0.2.1",
  "description": "List of ML tasks for huggingface.co/tasks",
  "repository": "https://github.com/huggingface/huggingface.js.git",
  "publishConfig": {
package/src/index.ts CHANGED
@@ -39,8 +39,6 @@ export type {
  } from "./widget-example";
  export { InferenceDisplayability } from "./model-data";
 
- export { TAG_NFAA_CONTENT, OTHER_TAGS_SUGGESTIONS, TAG_TEXT_GENERATION_INFERENCE, TAG_CUSTOM_CODE } from "./tags";
-
  import * as snippets from "./snippets";
  export { snippets };
 
@@ -541,6 +541,14 @@ transcriptions = asr_model.transcribe(["file.wav"])`,
 
  const mlAgents = (model: ModelData) => [`mlagents-load-from-hf --repo-id="${model.id}" --local-dir="./downloads"`];
 
+ const sentis = (model: ModelData) => [
+ `string modelName = "[Your model name here].sentis";
+ Model model = ModelLoader.Load(Application.streamingAssetsPath + "/" + modelName);
+ IWorker engine = WorkerFactory.CreateWorker(BackendType.GPUCompute, model);
+ // Please see provided C# file for more details
+ `
+ ];
+
  const mlx = (model: ModelData) => [
  `pip install huggingface_hub hf_transfer
 
@@ -770,10 +778,16 @@ export const MODEL_LIBRARIES_UI_ELEMENTS: Partial<Record<ModelLibraryKey, Librar
  "ml-agents": {
  btnLabel: "ml-agents",
  repoName: "ml-agents",
- repoUrl: "https://github.com/huggingface/ml-agents",
+ repoUrl: "https://github.com/Unity-Technologies/ml-agents",
  docsUrl: "https://huggingface.co/docs/hub/ml-agents",
  snippets: mlAgents,
  },
+ "unity-sentis": {
+ btnLabel: "unity-sentis",
+ repoName: "unity-sentis",
+ repoUrl: "https://github.com/Unity-Technologies/sentis-samples",
+ snippets: sentis,
+ },
  pythae: {
  btnLabel: "pythae",
  repoName: "pythae",
@@ -37,13 +37,16 @@ export enum ModelLibrary {
  "stanza" = "Stanza",
  "fasttext" = "fastText",
  "stable-baselines3" = "Stable-Baselines3",
- "ml-agents" = "ML-Agents",
+ "ml-agents" = "Unity ML-Agents",
  "pythae" = "Pythae",
  "mindspore" = "MindSpore",
+ "unity-sentis" = "Unity Sentis",
  }
 
  export type ModelLibraryKey = keyof typeof ModelLibrary;
 
- export const ALL_DISPLAY_MODEL_LIBRARY_KEYS = Object.keys(ModelLibrary).filter(
+ export const ALL_MODEL_LIBRARY_KEYS = Object.keys(ModelLibrary) as ModelLibraryKey[];
+
+ export const ALL_DISPLAY_MODEL_LIBRARY_KEYS = ALL_MODEL_LIBRARY_KEYS.filter(
  (k) => !["doctr", "k2", "mindspore", "tensorflowtts"].includes(k)
  );
@@ -0,0 +1,18 @@
+ import type { TaskDataCustom } from "..";
+
+ const taskData: TaskDataCustom = {
+ datasets: [],
+ demo: {
+ inputs: [],
+ outputs: [],
+ },
+ metrics: [],
+ models: [],
+ spaces: [],
+ summary:
+ "Mask generation is creating a binary image that identifies a specific object or region of interest in an input image. Masks are often used in segmentation tasks, where they provide a precise way to isolate the object of interest for further processing or analysis.",
+ widgetModels: [],
+ youtubeId: "",
+ };
+
+ export default taskData;
@@ -0,0 +1,39 @@
+ ## Use Cases
+
+ ### Object Search
+
+ Zero-shot object detection models can be used in image search. Smartphones, for example, use zero-shot object detection models to detect entities (such as specific places or objects) and allow the user to search for the entity on the internet.
+
+ ### Object Counting
+
+ Zero-shot object detection models are used to count instances of objects in a given image. This can include counting the objects in warehouses or stores or the number of visitors in a store. They are also used to manage crowds at events to prevent disasters.
+
+ ## Inference
+
+ You can infer with zero-shot object detection models through the `zero-shot-object-detection` pipeline. When calling the pipeline, you just need to specify a path or HTTP link to an image and the candidate labels.
+
+ ```python
+ from transformers import pipeline
+ from PIL import Image
+
+ image = Image.open("my-image.png").convert("RGB")
+
+ detector = pipeline(model="google/owlvit-base-patch32", task="zero-shot-object-detection")
+
+ predictions = detector(
+ image,
+ candidate_labels=["a photo of a cat", "a photo of a dog"],
+ )
+
+ # [{'score': 0.95,
+ # 'label': 'a photo of a cat',
+ # 'box': {'xmin': 180, 'ymin': 71, 'xmax': 271, 'ymax': 178}},
+ # ...
+ # ]
+ ```
+
+ # Useful Resources
+
+ - [Zero-shot object detection task guide](https://huggingface.co/docs/transformers/tasks/zero_shot_object_detection)
+
+ This page was made possible thanks to the efforts of [Victor Guichard](https://huggingface.co/VictorGuichard)
@@ -0,0 +1,57 @@
+ import type { TaskDataCustom } from "..";
+
+ const taskData: TaskDataCustom = {
+ datasets: [],
+ demo: {
+ inputs: [
+ {
+ filename: "zero-shot-object-detection-input.jpg",
+ type: "img",
+ },
+ {
+ label: "Classes",
+ content: "cat, dog, bird",
+ type: "text",
+ },
+ ],
+ outputs: [
+ {
+ filename: "zero-shot-object-detection-output.jpg",
+ type: "img",
+ },
+ ],
+ },
+ metrics: [
+ {
+ description:
+ "The Average Precision (AP) metric is the Area Under the PR Curve (AUC-PR). It is calculated for each class separately",
+ id: "Average Precision",
+ },
+ {
+ description: "The Mean Average Precision (mAP) metric is the overall average of the AP values",
+ id: "Mean Average Precision",
+ },
+ {
+ description:
+ "The APα metric is the Average Precision at the IoU threshold of a α value, for example, AP50 and AP75",
+ id: "APα",
+ },
+ ],
+ models: [
+ {
+ description: "Solid zero-shot object detection model that uses CLIP as backbone.",
+ id: "google/owlvit-base-patch32",
+ },
+ {
+ description: "The improved version of the owlvit model.",
+ id: "google/owlv2-base-patch16-ensemble",
+ },
+ ],
+ spaces: [],
+ summary:
+ "Zero-shot object detection is a computer vision task to detect objects and their classes in images, without any prior training or knowledge of the classes. Zero-shot object detection models receive an image as input, as well as a list of candidate classes, and output the bounding boxes and labels where the objects have been detected.",
+ widgetModels: [],
+ youtubeId: "",
+ };
+
+ export default taskData;
package/src/tags.ts DELETED
@@ -1,15 +0,0 @@
- export const TAG_NFAA_CONTENT = "not-for-all-audiences";
- export const OTHER_TAGS_SUGGESTIONS = [
- "chemistry",
- "biology",
- "finance",
- "legal",
- "music",
- "art",
- "code",
- "climate",
- "medical",
- TAG_NFAA_CONTENT,
- ];
- export const TAG_TEXT_GENERATION_INFERENCE = "text-generation-inference";
- export const TAG_CUSTOM_CODE = "custom_code";