huggingface-hub 0.28.1__py3-none-any.whl → 0.29.0rc0__py3-none-any.whl

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of huggingface-hub might be problematic; see the registry's release advisory for more details.

Files changed (63)
  1. huggingface_hub/__init__.py +1 -4
  2. huggingface_hub/constants.py +16 -10
  3. huggingface_hub/file_download.py +10 -6
  4. huggingface_hub/hf_api.py +53 -23
  5. huggingface_hub/inference/_client.py +151 -84
  6. huggingface_hub/inference/_common.py +3 -27
  7. huggingface_hub/inference/_generated/_async_client.py +147 -83
  8. huggingface_hub/inference/_generated/types/__init__.py +1 -1
  9. huggingface_hub/inference/_generated/types/audio_classification.py +4 -5
  10. huggingface_hub/inference/_generated/types/audio_to_audio.py +3 -4
  11. huggingface_hub/inference/_generated/types/automatic_speech_recognition.py +7 -8
  12. huggingface_hub/inference/_generated/types/base.py +21 -0
  13. huggingface_hub/inference/_generated/types/chat_completion.py +29 -30
  14. huggingface_hub/inference/_generated/types/depth_estimation.py +3 -4
  15. huggingface_hub/inference/_generated/types/document_question_answering.py +5 -6
  16. huggingface_hub/inference/_generated/types/feature_extraction.py +5 -6
  17. huggingface_hub/inference/_generated/types/fill_mask.py +4 -5
  18. huggingface_hub/inference/_generated/types/image_classification.py +4 -5
  19. huggingface_hub/inference/_generated/types/image_segmentation.py +4 -5
  20. huggingface_hub/inference/_generated/types/image_to_image.py +5 -6
  21. huggingface_hub/inference/_generated/types/image_to_text.py +5 -6
  22. huggingface_hub/inference/_generated/types/object_detection.py +5 -6
  23. huggingface_hub/inference/_generated/types/question_answering.py +5 -6
  24. huggingface_hub/inference/_generated/types/sentence_similarity.py +3 -4
  25. huggingface_hub/inference/_generated/types/summarization.py +4 -5
  26. huggingface_hub/inference/_generated/types/table_question_answering.py +5 -6
  27. huggingface_hub/inference/_generated/types/text2text_generation.py +4 -5
  28. huggingface_hub/inference/_generated/types/text_classification.py +4 -5
  29. huggingface_hub/inference/_generated/types/text_generation.py +12 -13
  30. huggingface_hub/inference/_generated/types/text_to_audio.py +5 -6
  31. huggingface_hub/inference/_generated/types/text_to_image.py +8 -15
  32. huggingface_hub/inference/_generated/types/text_to_speech.py +5 -6
  33. huggingface_hub/inference/_generated/types/text_to_video.py +4 -5
  34. huggingface_hub/inference/_generated/types/token_classification.py +4 -5
  35. huggingface_hub/inference/_generated/types/translation.py +4 -5
  36. huggingface_hub/inference/_generated/types/video_classification.py +4 -5
  37. huggingface_hub/inference/_generated/types/visual_question_answering.py +5 -6
  38. huggingface_hub/inference/_generated/types/zero_shot_classification.py +4 -5
  39. huggingface_hub/inference/_generated/types/zero_shot_image_classification.py +4 -5
  40. huggingface_hub/inference/_generated/types/zero_shot_object_detection.py +5 -6
  41. huggingface_hub/inference/_providers/__init__.py +44 -8
  42. huggingface_hub/inference/_providers/_common.py +239 -0
  43. huggingface_hub/inference/_providers/black_forest_labs.py +66 -0
  44. huggingface_hub/inference/_providers/fal_ai.py +31 -100
  45. huggingface_hub/inference/_providers/fireworks_ai.py +6 -0
  46. huggingface_hub/inference/_providers/hf_inference.py +58 -142
  47. huggingface_hub/inference/_providers/hyperbolic.py +43 -0
  48. huggingface_hub/inference/_providers/nebius.py +41 -0
  49. huggingface_hub/inference/_providers/novita.py +26 -0
  50. huggingface_hub/inference/_providers/replicate.py +24 -119
  51. huggingface_hub/inference/_providers/sambanova.py +3 -86
  52. huggingface_hub/inference/_providers/together.py +36 -130
  53. huggingface_hub/utils/_headers.py +5 -0
  54. huggingface_hub/utils/_hf_folder.py +4 -32
  55. huggingface_hub/utils/_http.py +85 -2
  56. huggingface_hub/utils/_typing.py +1 -1
  57. huggingface_hub/utils/logging.py +6 -0
  58. {huggingface_hub-0.28.1.dist-info → huggingface_hub-0.29.0rc0.dist-info}/METADATA +1 -1
  59. {huggingface_hub-0.28.1.dist-info → huggingface_hub-0.29.0rc0.dist-info}/RECORD +63 -57
  60. {huggingface_hub-0.28.1.dist-info → huggingface_hub-0.29.0rc0.dist-info}/LICENSE +0 -0
  61. {huggingface_hub-0.28.1.dist-info → huggingface_hub-0.29.0rc0.dist-info}/WHEEL +0 -0
  62. {huggingface_hub-0.28.1.dist-info → huggingface_hub-0.29.0rc0.dist-info}/entry_points.txt +0 -0
  63. {huggingface_hub-0.28.1.dist-info → huggingface_hub-0.29.0rc0.dist-info}/top_level.txt +0 -0
@@ -3,13 +3,12 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
- @dataclass
11
+ @dataclass_with_extra
13
12
  class QuestionAnsweringInputData(BaseInferenceType):
14
13
  """One (context, question) pair to answer"""
15
14
 
@@ -19,7 +18,7 @@ class QuestionAnsweringInputData(BaseInferenceType):
19
18
  """The question to be answered"""
20
19
 
21
20
 
22
- @dataclass
21
+ @dataclass_with_extra
23
22
  class QuestionAnsweringParameters(BaseInferenceType):
24
23
  """Additional inference parameters for Question Answering"""
25
24
 
@@ -51,7 +50,7 @@ class QuestionAnsweringParameters(BaseInferenceType):
51
50
  """
52
51
 
53
52
 
54
- @dataclass
53
+ @dataclass_with_extra
55
54
  class QuestionAnsweringInput(BaseInferenceType):
56
55
  """Inputs for Question Answering inference"""
57
56
 
@@ -61,7 +60,7 @@ class QuestionAnsweringInput(BaseInferenceType):
61
60
  """Additional inference parameters for Question Answering"""
62
61
 
63
62
 
64
- @dataclass
63
+ @dataclass_with_extra
65
64
  class QuestionAnsweringOutputElement(BaseInferenceType):
66
65
  """Outputs of inference for the Question Answering task"""
67
66
 
@@ -3,13 +3,12 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Any, Dict, List, Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
- @dataclass
11
+ @dataclass_with_extra
13
12
  class SentenceSimilarityInputData(BaseInferenceType):
14
13
  sentences: List[str]
15
14
  """A list of strings which will be compared against the source_sentence."""
@@ -19,7 +18,7 @@ class SentenceSimilarityInputData(BaseInferenceType):
19
18
  """
20
19
 
21
20
 
22
- @dataclass
21
+ @dataclass_with_extra
23
22
  class SentenceSimilarityInput(BaseInferenceType):
24
23
  """Inputs for Sentence similarity inference"""
25
24
 
@@ -3,16 +3,15 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Any, Dict, Literal, Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
11
  SummarizationTruncationStrategy = Literal["do_not_truncate", "longest_first", "only_first", "only_second"]
13
12
 
14
13
 
15
- @dataclass
14
+ @dataclass_with_extra
16
15
  class SummarizationParameters(BaseInferenceType):
17
16
  """Additional inference parameters for summarization."""
18
17
 
@@ -24,7 +23,7 @@ class SummarizationParameters(BaseInferenceType):
24
23
  """The truncation strategy to use."""
25
24
 
26
25
 
27
- @dataclass
26
+ @dataclass_with_extra
28
27
  class SummarizationInput(BaseInferenceType):
29
28
  """Inputs for Summarization inference"""
30
29
 
@@ -34,7 +33,7 @@ class SummarizationInput(BaseInferenceType):
34
33
  """Additional inference parameters for summarization."""
35
34
 
36
35
 
37
- @dataclass
36
+ @dataclass_with_extra
38
37
  class SummarizationOutput(BaseInferenceType):
39
38
  """Outputs of inference for the Summarization task"""
40
39
 
@@ -3,13 +3,12 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Dict, List, Literal, Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
- @dataclass
11
+ @dataclass_with_extra
13
12
  class TableQuestionAnsweringInputData(BaseInferenceType):
14
13
  """One (table, question) pair to answer"""
15
14
 
@@ -22,7 +21,7 @@ class TableQuestionAnsweringInputData(BaseInferenceType):
22
21
  Padding = Literal["do_not_pad", "longest", "max_length"]
23
22
 
24
23
 
25
- @dataclass
24
+ @dataclass_with_extra
26
25
  class TableQuestionAnsweringParameters(BaseInferenceType):
27
26
  """Additional inference parameters for Table Question Answering"""
28
27
 
@@ -37,7 +36,7 @@ class TableQuestionAnsweringParameters(BaseInferenceType):
37
36
  """Activates and controls truncation."""
38
37
 
39
38
 
40
- @dataclass
39
+ @dataclass_with_extra
41
40
  class TableQuestionAnsweringInput(BaseInferenceType):
42
41
  """Inputs for Table Question Answering inference"""
43
42
 
@@ -47,7 +46,7 @@ class TableQuestionAnsweringInput(BaseInferenceType):
47
46
  """Additional inference parameters for Table Question Answering"""
48
47
 
49
48
 
50
- @dataclass
49
+ @dataclass_with_extra
51
50
  class TableQuestionAnsweringOutputElement(BaseInferenceType):
52
51
  """Outputs of inference for the Table Question Answering task"""
53
52
 
@@ -3,16 +3,15 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Any, Dict, Literal, Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
11
  Text2TextGenerationTruncationStrategy = Literal["do_not_truncate", "longest_first", "only_first", "only_second"]
13
12
 
14
13
 
15
- @dataclass
14
+ @dataclass_with_extra
16
15
  class Text2TextGenerationParameters(BaseInferenceType):
17
16
  """Additional inference parameters for Text2text Generation"""
18
17
 
@@ -24,7 +23,7 @@ class Text2TextGenerationParameters(BaseInferenceType):
24
23
  """The truncation strategy to use"""
25
24
 
26
25
 
27
- @dataclass
26
+ @dataclass_with_extra
28
27
  class Text2TextGenerationInput(BaseInferenceType):
29
28
  """Inputs for Text2text Generation inference"""
30
29
 
@@ -34,7 +33,7 @@ class Text2TextGenerationInput(BaseInferenceType):
34
33
  """Additional inference parameters for Text2text Generation"""
35
34
 
36
35
 
37
- @dataclass
36
+ @dataclass_with_extra
38
37
  class Text2TextGenerationOutput(BaseInferenceType):
39
38
  """Outputs of inference for the Text2text Generation task"""
40
39
 
@@ -3,16 +3,15 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Literal, Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
11
  TextClassificationOutputTransform = Literal["sigmoid", "softmax", "none"]
13
12
 
14
13
 
15
- @dataclass
14
+ @dataclass_with_extra
16
15
  class TextClassificationParameters(BaseInferenceType):
17
16
  """Additional inference parameters for Text Classification"""
18
17
 
@@ -22,7 +21,7 @@ class TextClassificationParameters(BaseInferenceType):
22
21
  """When specified, limits the output to the top K most probable classes."""
23
22
 
24
23
 
25
- @dataclass
24
+ @dataclass_with_extra
26
25
  class TextClassificationInput(BaseInferenceType):
27
26
  """Inputs for Text Classification inference"""
28
27
 
@@ -32,7 +31,7 @@ class TextClassificationInput(BaseInferenceType):
32
31
  """Additional inference parameters for Text Classification"""
33
32
 
34
33
 
35
- @dataclass
34
+ @dataclass_with_extra
36
35
  class TextClassificationOutputElement(BaseInferenceType):
37
36
  """Outputs of inference for the Text Classification task"""
38
37
 
@@ -3,16 +3,15 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Any, List, Literal, Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
11
  TypeEnum = Literal["json", "regex"]
13
12
 
14
13
 
15
- @dataclass
14
+ @dataclass_with_extra
16
15
  class TextGenerationInputGrammarType(BaseInferenceType):
17
16
  type: "TypeEnum"
18
17
  value: Any
@@ -22,7 +21,7 @@ class TextGenerationInputGrammarType(BaseInferenceType):
22
21
  """
23
22
 
24
23
 
25
- @dataclass
24
+ @dataclass_with_extra
26
25
  class TextGenerationInputGenerateParameters(BaseInferenceType):
27
26
  adapter_id: Optional[str] = None
28
27
  """Lora adapter id"""
@@ -73,7 +72,7 @@ class TextGenerationInputGenerateParameters(BaseInferenceType):
73
72
  """
74
73
 
75
74
 
76
- @dataclass
75
+ @dataclass_with_extra
77
76
  class TextGenerationInput(BaseInferenceType):
78
77
  """Text Generation Input.
79
78
  Auto-generated from TGI specs.
@@ -89,14 +88,14 @@ class TextGenerationInput(BaseInferenceType):
89
88
  TextGenerationOutputFinishReason = Literal["length", "eos_token", "stop_sequence"]
90
89
 
91
90
 
92
- @dataclass
91
+ @dataclass_with_extra
93
92
  class TextGenerationOutputPrefillToken(BaseInferenceType):
94
93
  id: int
95
94
  logprob: float
96
95
  text: str
97
96
 
98
97
 
99
- @dataclass
98
+ @dataclass_with_extra
100
99
  class TextGenerationOutputToken(BaseInferenceType):
101
100
  id: int
102
101
  logprob: float
@@ -104,7 +103,7 @@ class TextGenerationOutputToken(BaseInferenceType):
104
103
  text: str
105
104
 
106
105
 
107
- @dataclass
106
+ @dataclass_with_extra
108
107
  class TextGenerationOutputBestOfSequence(BaseInferenceType):
109
108
  finish_reason: "TextGenerationOutputFinishReason"
110
109
  generated_text: str
@@ -115,7 +114,7 @@ class TextGenerationOutputBestOfSequence(BaseInferenceType):
115
114
  top_tokens: Optional[List[List[TextGenerationOutputToken]]] = None
116
115
 
117
116
 
118
- @dataclass
117
+ @dataclass_with_extra
119
118
  class TextGenerationOutputDetails(BaseInferenceType):
120
119
  finish_reason: "TextGenerationOutputFinishReason"
121
120
  generated_tokens: int
@@ -126,7 +125,7 @@ class TextGenerationOutputDetails(BaseInferenceType):
126
125
  top_tokens: Optional[List[List[TextGenerationOutputToken]]] = None
127
126
 
128
127
 
129
- @dataclass
128
+ @dataclass_with_extra
130
129
  class TextGenerationOutput(BaseInferenceType):
131
130
  """Text Generation Output.
132
131
  Auto-generated from TGI specs.
@@ -138,7 +137,7 @@ class TextGenerationOutput(BaseInferenceType):
138
137
  details: Optional[TextGenerationOutputDetails] = None
139
138
 
140
139
 
141
- @dataclass
140
+ @dataclass_with_extra
142
141
  class TextGenerationStreamOutputStreamDetails(BaseInferenceType):
143
142
  finish_reason: "TextGenerationOutputFinishReason"
144
143
  generated_tokens: int
@@ -146,7 +145,7 @@ class TextGenerationStreamOutputStreamDetails(BaseInferenceType):
146
145
  seed: Optional[int] = None
147
146
 
148
147
 
149
- @dataclass
148
+ @dataclass_with_extra
150
149
  class TextGenerationStreamOutputToken(BaseInferenceType):
151
150
  id: int
152
151
  logprob: float
@@ -154,7 +153,7 @@ class TextGenerationStreamOutputToken(BaseInferenceType):
154
153
  text: str
155
154
 
156
155
 
157
- @dataclass
156
+ @dataclass_with_extra
158
157
  class TextGenerationStreamOutput(BaseInferenceType):
159
158
  """Text Generation Stream Output.
160
159
  Auto-generated from TGI specs.
@@ -3,16 +3,15 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Any, Literal, Optional, Union
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
11
  TextToAudioEarlyStoppingEnum = Literal["never"]
13
12
 
14
13
 
15
- @dataclass
14
+ @dataclass_with_extra
16
15
  class TextToAudioGenerationParameters(BaseInferenceType):
17
16
  """Parametrization of the text generation process"""
18
17
 
@@ -72,7 +71,7 @@ class TextToAudioGenerationParameters(BaseInferenceType):
72
71
  """Whether the model should use the past last key/values attentions to speed up decoding"""
73
72
 
74
73
 
75
- @dataclass
74
+ @dataclass_with_extra
76
75
  class TextToAudioParameters(BaseInferenceType):
77
76
  """Additional inference parameters for Text To Audio"""
78
77
 
@@ -81,7 +80,7 @@ class TextToAudioParameters(BaseInferenceType):
81
80
  """Parametrization of the text generation process"""
82
81
 
83
82
 
84
- @dataclass
83
+ @dataclass_with_extra
85
84
  class TextToAudioInput(BaseInferenceType):
86
85
  """Inputs for Text To Audio inference"""
87
86
 
@@ -91,7 +90,7 @@ class TextToAudioInput(BaseInferenceType):
91
90
  """Additional inference parameters for Text To Audio"""
92
91
 
93
92
 
94
- @dataclass
93
+ @dataclass_with_extra
95
94
  class TextToAudioOutput(BaseInferenceType):
96
95
  """Outputs of inference for the Text To Audio task"""
97
96
 
@@ -3,21 +3,12 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Any, Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
- @dataclass
13
- class TextToImageTargetSize(BaseInferenceType):
14
- """The size in pixel of the output image"""
15
-
16
- height: int
17
- width: int
18
-
19
-
20
- @dataclass
11
+ @dataclass_with_extra
21
12
  class TextToImageParameters(BaseInferenceType):
22
13
  """Additional inference parameters for Text To Image"""
23
14
 
@@ -25,6 +16,8 @@ class TextToImageParameters(BaseInferenceType):
25
16
  """A higher guidance scale value encourages the model to generate images closely linked to
26
17
  the text prompt, but values too high may cause saturation and other artifacts.
27
18
  """
19
+ height: Optional[int] = None
20
+ """The height in pixels of the output image"""
28
21
  negative_prompt: Optional[str] = None
29
22
  """One prompt to guide what NOT to include in image generation."""
30
23
  num_inference_steps: Optional[int] = None
@@ -35,11 +28,11 @@ class TextToImageParameters(BaseInferenceType):
35
28
  """Override the scheduler with a compatible one."""
36
29
  seed: Optional[int] = None
37
30
  """Seed for the random number generator."""
38
- target_size: Optional[TextToImageTargetSize] = None
39
- """The size in pixel of the output image"""
31
+ width: Optional[int] = None
32
+ """The width in pixels of the output image"""
40
33
 
41
34
 
42
- @dataclass
35
+ @dataclass_with_extra
43
36
  class TextToImageInput(BaseInferenceType):
44
37
  """Inputs for Text To Image inference"""
45
38
 
@@ -49,7 +42,7 @@ class TextToImageInput(BaseInferenceType):
49
42
  """Additional inference parameters for Text To Image"""
50
43
 
51
44
 
52
- @dataclass
45
+ @dataclass_with_extra
53
46
  class TextToImageOutput(BaseInferenceType):
54
47
  """Outputs of inference for the Text To Image task"""
55
48
 
@@ -3,16 +3,15 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Any, Literal, Optional, Union
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
11
  TextToSpeechEarlyStoppingEnum = Literal["never"]
13
12
 
14
13
 
15
- @dataclass
14
+ @dataclass_with_extra
16
15
  class TextToSpeechGenerationParameters(BaseInferenceType):
17
16
  """Parametrization of the text generation process"""
18
17
 
@@ -72,7 +71,7 @@ class TextToSpeechGenerationParameters(BaseInferenceType):
72
71
  """Whether the model should use the past last key/values attentions to speed up decoding"""
73
72
 
74
73
 
75
- @dataclass
74
+ @dataclass_with_extra
76
75
  class TextToSpeechParameters(BaseInferenceType):
77
76
  """Additional inference parameters for Text To Speech"""
78
77
 
@@ -81,7 +80,7 @@ class TextToSpeechParameters(BaseInferenceType):
81
80
  """Parametrization of the text generation process"""
82
81
 
83
82
 
84
- @dataclass
83
+ @dataclass_with_extra
85
84
  class TextToSpeechInput(BaseInferenceType):
86
85
  """Inputs for Text To Speech inference"""
87
86
 
@@ -91,7 +90,7 @@ class TextToSpeechInput(BaseInferenceType):
91
90
  """Additional inference parameters for Text To Speech"""
92
91
 
93
92
 
94
- @dataclass
93
+ @dataclass_with_extra
95
94
  class TextToSpeechOutput(BaseInferenceType):
96
95
  """Outputs of inference for the Text To Speech task"""
97
96
 
@@ -3,13 +3,12 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Any, List, Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
- @dataclass
11
+ @dataclass_with_extra
13
12
  class TextToVideoParameters(BaseInferenceType):
14
13
  """Additional inference parameters for Text To Video"""
15
14
 
@@ -29,7 +28,7 @@ class TextToVideoParameters(BaseInferenceType):
29
28
  """Seed for the random number generator."""
30
29
 
31
30
 
32
- @dataclass
31
+ @dataclass_with_extra
33
32
  class TextToVideoInput(BaseInferenceType):
34
33
  """Inputs for Text To Video inference"""
35
34
 
@@ -39,7 +38,7 @@ class TextToVideoInput(BaseInferenceType):
39
38
  """Additional inference parameters for Text To Video"""
40
39
 
41
40
 
42
- @dataclass
41
+ @dataclass_with_extra
43
42
  class TextToVideoOutput(BaseInferenceType):
44
43
  """Outputs of inference for the Text To Video task"""
45
44
 
@@ -3,16 +3,15 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import List, Literal, Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
11
  TokenClassificationAggregationStrategy = Literal["none", "simple", "first", "average", "max"]
13
12
 
14
13
 
15
- @dataclass
14
+ @dataclass_with_extra
16
15
  class TokenClassificationParameters(BaseInferenceType):
17
16
  """Additional inference parameters for Token Classification"""
18
17
 
@@ -24,7 +23,7 @@ class TokenClassificationParameters(BaseInferenceType):
24
23
  """The number of overlapping tokens between chunks when splitting the input text."""
25
24
 
26
25
 
27
- @dataclass
26
+ @dataclass_with_extra
28
27
  class TokenClassificationInput(BaseInferenceType):
29
28
  """Inputs for Token Classification inference"""
30
29
 
@@ -34,7 +33,7 @@ class TokenClassificationInput(BaseInferenceType):
34
33
  """Additional inference parameters for Token Classification"""
35
34
 
36
35
 
37
- @dataclass
36
+ @dataclass_with_extra
38
37
  class TokenClassificationOutputElement(BaseInferenceType):
39
38
  """Outputs of inference for the Token Classification task"""
40
39
 
@@ -3,16 +3,15 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Any, Dict, Literal, Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
11
  TranslationTruncationStrategy = Literal["do_not_truncate", "longest_first", "only_first", "only_second"]
13
12
 
14
13
 
15
- @dataclass
14
+ @dataclass_with_extra
16
15
  class TranslationParameters(BaseInferenceType):
17
16
  """Additional inference parameters for Translation"""
18
17
 
@@ -32,7 +31,7 @@ class TranslationParameters(BaseInferenceType):
32
31
  """The truncation strategy to use."""
33
32
 
34
33
 
35
- @dataclass
34
+ @dataclass_with_extra
36
35
  class TranslationInput(BaseInferenceType):
37
36
  """Inputs for Translation inference"""
38
37
 
@@ -42,7 +41,7 @@ class TranslationInput(BaseInferenceType):
42
41
  """Additional inference parameters for Translation"""
43
42
 
44
43
 
45
- @dataclass
44
+ @dataclass_with_extra
46
45
  class TranslationOutput(BaseInferenceType):
47
46
  """Outputs of inference for the Translation task"""
48
47
 
@@ -3,16 +3,15 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Any, Literal, Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
11
  VideoClassificationOutputTransform = Literal["sigmoid", "softmax", "none"]
13
12
 
14
13
 
15
- @dataclass
14
+ @dataclass_with_extra
16
15
  class VideoClassificationParameters(BaseInferenceType):
17
16
  """Additional inference parameters for Video Classification"""
18
17
 
@@ -26,7 +25,7 @@ class VideoClassificationParameters(BaseInferenceType):
26
25
  """When specified, limits the output to the top K most probable classes."""
27
26
 
28
27
 
29
- @dataclass
28
+ @dataclass_with_extra
30
29
  class VideoClassificationInput(BaseInferenceType):
31
30
  """Inputs for Video Classification inference"""
32
31
 
@@ -36,7 +35,7 @@ class VideoClassificationInput(BaseInferenceType):
36
35
  """Additional inference parameters for Video Classification"""
37
36
 
38
37
 
39
- @dataclass
38
+ @dataclass_with_extra
40
39
  class VideoClassificationOutputElement(BaseInferenceType):
41
40
  """Outputs of inference for the Video Classification task"""
42
41
 
@@ -3,13 +3,12 @@
3
3
  # See:
4
4
  # - script: https://github.com/huggingface/huggingface.js/blob/main/packages/tasks/scripts/inference-codegen.ts
5
5
  # - specs: https://github.com/huggingface/huggingface.js/tree/main/packages/tasks/src/tasks.
6
- from dataclasses import dataclass
7
6
  from typing import Any, Optional
8
7
 
9
- from .base import BaseInferenceType
8
+ from .base import BaseInferenceType, dataclass_with_extra
10
9
 
11
10
 
12
- @dataclass
11
+ @dataclass_with_extra
13
12
  class VisualQuestionAnsweringInputData(BaseInferenceType):
14
13
  """One (image, question) pair to answer"""
15
14
 
@@ -19,7 +18,7 @@ class VisualQuestionAnsweringInputData(BaseInferenceType):
19
18
  """The question to answer based on the image."""
20
19
 
21
20
 
22
- @dataclass
21
+ @dataclass_with_extra
23
22
  class VisualQuestionAnsweringParameters(BaseInferenceType):
24
23
  """Additional inference parameters for Visual Question Answering"""
25
24
 
@@ -30,7 +29,7 @@ class VisualQuestionAnsweringParameters(BaseInferenceType):
30
29
  """
31
30
 
32
31
 
33
- @dataclass
32
+ @dataclass_with_extra
34
33
  class VisualQuestionAnsweringInput(BaseInferenceType):
35
34
  """Inputs for Visual Question Answering inference"""
36
35
 
@@ -40,7 +39,7 @@ class VisualQuestionAnsweringInput(BaseInferenceType):
40
39
  """Additional inference parameters for Visual Question Answering"""
41
40
 
42
41
 
43
- @dataclass
42
+ @dataclass_with_extra
44
43
  class VisualQuestionAnsweringOutputElement(BaseInferenceType):
45
44
  """Outputs of inference for the Visual Question Answering task"""
46
45