rapidata 2.15.0__py3-none-any.whl → 2.17.0__py3-none-any.whl

This diff shows the content of publicly released package versions as published to the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.

Potentially problematic release: this version of rapidata might be problematic.

Files changed (77)
  1. rapidata/__init__.py +1 -0
  2. rapidata/api_client/__init__.py +17 -7
  3. rapidata/api_client/api/__init__.py +2 -1
  4. rapidata/api_client/api/campaign_api.py +554 -30
  5. rapidata/api_client/api/client_api.py +295 -23
  6. rapidata/api_client/api/coco_api.py +594 -8
  7. rapidata/api_client/api/compare_workflow_api.py +23 -23
  8. rapidata/api_client/api/datapoint_api.py +548 -26
  9. rapidata/api_client/api/dataset_api.py +2198 -186
  10. rapidata/api_client/api/feedback_api.py +306 -0
  11. rapidata/api_client/api/identity_api.py +1143 -78
  12. rapidata/api_client/api/newsletter_api.py +299 -11
  13. rapidata/api_client/api/order_api.py +5367 -565
  14. rapidata/api_client/api/pipeline_api.py +249 -510
  15. rapidata/api_client/api/rapid_api.py +1930 -254
  16. rapidata/api_client/api/simple_workflow_api.py +23 -23
  17. rapidata/api_client/api/validation_set_api.py +5259 -0
  18. rapidata/api_client/api/workflow_api.py +932 -137
  19. rapidata/api_client/models/__init__.py +15 -6
  20. rapidata/api_client/models/ab_test_selection_a_inner.py +24 -10
  21. rapidata/api_client/models/add_campaign_model.py +1 -1
  22. rapidata/api_client/models/add_user_response_result.py +106 -0
  23. rapidata/api_client/models/add_user_response_result_validation_truth.py +258 -0
  24. rapidata/api_client/models/add_validation_rapid_model.py +3 -3
  25. rapidata/api_client/models/add_validation_text_rapid_model.py +3 -3
  26. rapidata/api_client/models/are_rapids_active_result.py +87 -0
  27. rapidata/api_client/models/compare_workflow_config_model.py +1 -1
  28. rapidata/api_client/models/compare_workflow_model.py +4 -4
  29. rapidata/api_client/models/create_datapoint_from_files_model.py +102 -0
  30. rapidata/api_client/models/create_datapoint_from_files_model_metadata_inner.py +168 -0
  31. rapidata/api_client/models/create_datapoint_from_text_sources_model.py +109 -0
  32. rapidata/api_client/models/create_datapoint_from_urls_model.py +5 -5
  33. rapidata/api_client/models/create_datapoints_from_s3_bucket_model.py +124 -0
  34. rapidata/api_client/models/create_order_model.py +1 -1
  35. rapidata/api_client/models/create_rapid_result.py +87 -0
  36. rapidata/api_client/models/create_validation_set_model.py +87 -0
  37. rapidata/api_client/models/datapoint_metadata_model.py +3 -3
  38. rapidata/api_client/models/early_stopping_referee_model.py +1 -1
  39. rapidata/api_client/models/elo_config_model.py +2 -2
  40. rapidata/api_client/models/evaluation_workflow_model.py +2 -2
  41. rapidata/api_client/models/get_validation_rapids_query.py +123 -0
  42. rapidata/api_client/models/get_validation_rapids_query_paged_result.py +105 -0
  43. rapidata/api_client/models/online_pair_maker_config_model.py +1 -1
  44. rapidata/api_client/models/pipeline_id_workflow_artifact_id_put_request.py +140 -0
  45. rapidata/api_client/models/query_validation_rapids_result.py +3 -3
  46. rapidata/api_client/models/rapid_issue.py +4 -0
  47. rapidata/api_client/models/report_model.py +4 -4
  48. rapidata/api_client/models/shuffling_selection.py +106 -0
  49. rapidata/api_client/models/simple_workflow_config_model.py +1 -1
  50. rapidata/api_client/models/simple_workflow_config_model_blueprint.py +1 -1
  51. rapidata/api_client/models/simple_workflow_model.py +1 -1
  52. rapidata/api_client/models/simple_workflow_model_blueprint.py +1 -1
  53. rapidata/api_client/models/update_campaign_model.py +1 -1
  54. rapidata/api_client/models/update_dataset_name_model.py +87 -0
  55. rapidata/api_client/models/update_order_name_model.py +87 -0
  56. rapidata/api_client/models/upload_text_sources_to_dataset_model.py +3 -3
  57. rapidata/api_client/models/user_score_user_filter_model.py +9 -2
  58. rapidata/api_client_README.md +153 -88
  59. rapidata/rapidata_client/__init__.py +1 -0
  60. rapidata/rapidata_client/demographic/demographic_manager.py +1 -1
  61. rapidata/rapidata_client/filter/user_score_filter.py +4 -1
  62. rapidata/rapidata_client/order/_rapidata_dataset.py +10 -11
  63. rapidata/rapidata_client/order/_rapidata_order_builder.py +1 -1
  64. rapidata/rapidata_client/order/rapidata_order.py +5 -5
  65. rapidata/rapidata_client/order/rapidata_order_manager.py +1 -1
  66. rapidata/rapidata_client/order/rapidata_results.py +1 -1
  67. rapidata/rapidata_client/selection/__init__.py +1 -0
  68. rapidata/rapidata_client/selection/rapidata_selections.py +4 -1
  69. rapidata/rapidata_client/selection/shuffling_selection.py +36 -0
  70. rapidata/rapidata_client/validation/rapidata_validation_set.py +11 -0
  71. rapidata/rapidata_client/validation/rapids/rapids.py +3 -5
  72. rapidata/rapidata_client/validation/validation_set_manager.py +36 -21
  73. rapidata/rapidata_client/workflow/_ranking_workflow.py +2 -2
  74. {rapidata-2.15.0.dist-info → rapidata-2.17.0.dist-info}/METADATA +1 -1
  75. {rapidata-2.15.0.dist-info → rapidata-2.17.0.dist-info}/RECORD +77 -59
  76. {rapidata-2.15.0.dist-info → rapidata-2.17.0.dist-info}/LICENSE +0 -0
  77. {rapidata-2.15.0.dist-info → rapidata-2.17.0.dist-info}/WHEEL +0 -0
rapidata/api_client/models/are_rapids_active_result.py
@@ -0,0 +1,87 @@
+ # coding: utf-8
+
+ """
+ Rapidata.Dataset
+
+ No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+ """ # noqa: E501
+
+
+ from __future__ import annotations
+ import pprint
+ import re # noqa: F401
+ import json
+
+ from pydantic import BaseModel, ConfigDict, Field, StrictBool
+ from typing import Any, ClassVar, Dict, List
+ from typing import Optional, Set
+ from typing_extensions import Self
+
+ class AreRapidsActiveResult(BaseModel):
+ """
+ AreRapidsActiveResult
+ """ # noqa: E501
+ is_valid: StrictBool = Field(alias="isValid")
+ __properties: ClassVar[List[str]] = ["isValid"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of AreRapidsActiveResult from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([
+ ])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of AreRapidsActiveResult from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "isValid": obj.get("isValid")
+ })
+ return _obj
+
+
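
For orientation (not part of the diff itself): a minimal usage sketch for the new AreRapidsActiveResult model, relying only on the fields and helpers shown above. Because populate_by_name=True, either the isValid alias or the is_valid field name can be used at construction time.

# Hedged usage sketch; assumes only what the generated model above declares.
from rapidata.api_client.models.are_rapids_active_result import AreRapidsActiveResult

result = AreRapidsActiveResult(isValid=True)        # alias accepted; is_valid=True works as well
print(result.to_dict())                             # -> {'isValid': True}, serialized by alias
roundtrip = AreRapidsActiveResult.from_json(result.to_json())
print(roundtrip.is_valid)                           # -> True
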
rapidata/api_client/models/compare_workflow_config_model.py
@@ -28,7 +28,7 @@ from typing_extensions import Self
 
  class CompareWorkflowConfigModel(BaseModel):
  """
- The configuration for creating a compare workflow. A compare workflow is a workflow that continuously matches datapoints against each other and updates their respective ELO scores. The ELO scores are used to determine the relative strength of the datapoints, and datapoints are matched against other datapoints with similar ELO scores. The end goal is a ranking of the datapoints based on their relative strength.
+ The configuration for creating a compare workflow. A compare workflow is a workflow that continuously matches datapoints against each other and updates their respective ELO scores. The ELO scores are used to determine the relative strength of the datapoints, and datapoints are matched against other datapoints with similar ELO scores. The end goal is a ranking of the datapoints based on their relative strength.
  """ # noqa: E501
  t: StrictStr = Field(description="Discriminator value for CompareWorkflowConfig", alias="_t")
  criteria: StrictStr = Field(description="The criteria to add to each compare rapid.")
rapidata/api_client/models/compare_workflow_model.py
@@ -20,20 +20,20 @@ import json
  from pydantic import BaseModel, ConfigDict, Field, StrictStr, field_validator
  from typing import Any, ClassVar, Dict, List, Optional
  from rapidata.api_client.models.compare_workflow_model_pair_maker_config import CompareWorkflowModelPairMakerConfig
- from rapidata.api_client.models.create_datapoint_from_urls_model_metadata_inner import CreateDatapointFromUrlsModelMetadataInner
+ from rapidata.api_client.models.create_datapoint_from_files_model_metadata_inner import CreateDatapointFromFilesModelMetadataInner
  from rapidata.api_client.models.elo_config_model import EloConfigModel
  from typing import Optional, Set
  from typing_extensions import Self
 
  class CompareWorkflowModel(BaseModel):
  """
- If the SimpleWorkflow is chosen, each datapoint uploaded will correspond to a single task to be solved. This is the most commonly chosen workflow.
+ If the SimpleWorkflow is chosen, each datapoint uploaded will correspond to a single task to be solved. This is the most commonly chosen workflow.
  """ # noqa: E501
  t: StrictStr = Field(description="Discriminator value for CompareWorkflow", alias="_t")
  criteria: StrictStr = Field(description="The criteria that the datapoints should be compared based on. No default value.")
  pair_maker_config: Optional[CompareWorkflowModelPairMakerConfig] = Field(default=None, alias="pairMakerConfig")
  elo_config: Optional[EloConfigModel] = Field(default=None, alias="eloConfig")
- metadata: Optional[List[CreateDatapointFromUrlsModelMetadataInner]] = Field(default=None, description="The metadata is attached to every single rapid and can be used for something like the prompt.")
+ metadata: Optional[List[CreateDatapointFromFilesModelMetadataInner]] = Field(default=None, description="The metadata is attached to every single rapid and can be used for something like the prompt.")
  __properties: ClassVar[List[str]] = ["_t", "criteria", "pairMakerConfig", "eloConfig", "metadata"]
 
  @field_validator('t')
@@ -116,7 +116,7 @@ class CompareWorkflowModel(BaseModel):
  "criteria": obj.get("criteria"),
  "pairMakerConfig": CompareWorkflowModelPairMakerConfig.from_dict(obj["pairMakerConfig"]) if obj.get("pairMakerConfig") is not None else None,
  "eloConfig": EloConfigModel.from_dict(obj["eloConfig"]) if obj.get("eloConfig") is not None else None,
- "metadata": [CreateDatapointFromUrlsModelMetadataInner.from_dict(_item) for _item in obj["metadata"]] if obj.get("metadata") is not None else None
+ "metadata": [CreateDatapointFromFilesModelMetadataInner.from_dict(_item) for _item in obj["metadata"]] if obj.get("metadata") is not None else None
  })
  return _obj
 
rapidata/api_client/models/create_datapoint_from_files_model.py
@@ -0,0 +1,102 @@
+ # coding: utf-8
+
+ """
+ Rapidata.Dataset
+
+ No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+ """ # noqa: E501
+
+
+ from __future__ import annotations
+ import pprint
+ import re # noqa: F401
+ import json
+
+ from pydantic import BaseModel, ConfigDict, Field, StrictInt
+ from typing import Any, ClassVar, Dict, List, Optional
+ from rapidata.api_client.models.create_datapoint_from_files_model_metadata_inner import CreateDatapointFromFilesModelMetadataInner
+ from typing import Optional, Set
+ from typing_extensions import Self
+
+ class CreateDatapointFromFilesModel(BaseModel):
+ """
+ The form request for creating a datapoint from files. Needs to be encoded as a json string in the form request.
+ """ # noqa: E501
+ metadata: List[CreateDatapointFromFilesModelMetadataInner] = Field(description="The metadata of the datapoint.")
+ sort_index: Optional[StrictInt] = Field(default=None, description="The index will be used to keep the datapoints in order. Useful if upload is parallelized", alias="sortIndex")
+ __properties: ClassVar[List[str]] = ["metadata", "sortIndex"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CreateDatapointFromFilesModel from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([
+ ])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in metadata (list)
+ _items = []
+ if self.metadata:
+ for _item_metadata in self.metadata:
+ if _item_metadata:
+ _items.append(_item_metadata.to_dict())
+ _dict['metadata'] = _items
+ # set to None if sort_index (nullable) is None
+ # and model_fields_set contains the field
+ if self.sort_index is None and "sort_index" in self.model_fields_set:
+ _dict['sortIndex'] = None
+
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CreateDatapointFromFilesModel from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "metadata": [CreateDatapointFromFilesModelMetadataInner.from_dict(_item) for _item in obj["metadata"]] if obj.get("metadata") is not None else None,
+ "sortIndex": obj.get("sortIndex")
+ })
+ return _obj
+
+
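
As an illustration only: the docstring above says this model has to be encoded as a JSON string inside the form request, which could look roughly like the sketch below. The empty metadata list is just a placeholder; real entries are CreateDatapointFromFilesModelMetadataInner wrappers (see the next file).

# Hedged sketch; the empty metadata list is a placeholder, not a realistic payload.
from rapidata.api_client.models.create_datapoint_from_files_model import CreateDatapointFromFilesModel

model = CreateDatapointFromFilesModel(metadata=[], sortIndex=0)
form_json = model.to_json()                         # e.g. '{"metadata": [], "sortIndex": 0}'
restored = CreateDatapointFromFilesModel.from_dict({"metadata": [], "sortIndex": 0})
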
rapidata/api_client/models/create_datapoint_from_files_model_metadata_inner.py
@@ -0,0 +1,168 @@
+ # coding: utf-8
+
+ """
+ Rapidata.Dataset
+
+ No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+ """ # noqa: E501
+
+
+ from __future__ import annotations
+ import json
+ import pprint
+ from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator
+ from typing import Any, List, Optional
+ from rapidata.api_client.models.private_text_metadata_input import PrivateTextMetadataInput
+ from rapidata.api_client.models.prompt_metadata_input import PromptMetadataInput
+ from rapidata.api_client.models.public_text_metadata_input import PublicTextMetadataInput
+ from rapidata.api_client.models.transcription_metadata_input import TranscriptionMetadataInput
+ from pydantic import StrictStr, Field
+ from typing import Union, List, Set, Optional, Dict
+ from typing_extensions import Literal, Self
+
+ CREATEDATAPOINTFROMFILESMODELMETADATAINNER_ONE_OF_SCHEMAS = ["PrivateTextMetadataInput", "PromptMetadataInput", "PublicTextMetadataInput", "TranscriptionMetadataInput"]
+
+ class CreateDatapointFromFilesModelMetadataInner(BaseModel):
+ """
+ CreateDatapointFromFilesModelMetadataInner
+ """
+ # data type: PrivateTextMetadataInput
+ oneof_schema_1_validator: Optional[PrivateTextMetadataInput] = None
+ # data type: PromptMetadataInput
+ oneof_schema_2_validator: Optional[PromptMetadataInput] = None
+ # data type: PublicTextMetadataInput
+ oneof_schema_3_validator: Optional[PublicTextMetadataInput] = None
+ # data type: TranscriptionMetadataInput
+ oneof_schema_4_validator: Optional[TranscriptionMetadataInput] = None
+ actual_instance: Optional[Union[PrivateTextMetadataInput, PromptMetadataInput, PublicTextMetadataInput, TranscriptionMetadataInput]] = None
+ one_of_schemas: Set[str] = { "PrivateTextMetadataInput", "PromptMetadataInput", "PublicTextMetadataInput", "TranscriptionMetadataInput" }
+
+ model_config = ConfigDict(
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ discriminator_value_class_map: Dict[str, str] = {
+ }
+
+ def __init__(self, *args, **kwargs) -> None:
+ if args:
+ if len(args) > 1:
+ raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`")
+ if kwargs:
+ raise ValueError("If a position argument is used, keyword arguments cannot be used.")
+ super().__init__(actual_instance=args[0])
+ else:
+ super().__init__(**kwargs)
+
+ @field_validator('actual_instance')
+ def actual_instance_must_validate_oneof(cls, v):
+ instance = CreateDatapointFromFilesModelMetadataInner.model_construct()
+ error_messages = []
+ match = 0
+ # validate data type: PrivateTextMetadataInput
+ if not isinstance(v, PrivateTextMetadataInput):
+ error_messages.append(f"Error! Input type `{type(v)}` is not `PrivateTextMetadataInput`")
+ else:
+ match += 1
+ # validate data type: PromptMetadataInput
+ if not isinstance(v, PromptMetadataInput):
+ error_messages.append(f"Error! Input type `{type(v)}` is not `PromptMetadataInput`")
+ else:
+ match += 1
+ # validate data type: PublicTextMetadataInput
+ if not isinstance(v, PublicTextMetadataInput):
+ error_messages.append(f"Error! Input type `{type(v)}` is not `PublicTextMetadataInput`")
+ else:
+ match += 1
+ # validate data type: TranscriptionMetadataInput
+ if not isinstance(v, TranscriptionMetadataInput):
+ error_messages.append(f"Error! Input type `{type(v)}` is not `TranscriptionMetadataInput`")
+ else:
+ match += 1
+ if match > 1:
+ # more than 1 match
+ raise ValueError("Multiple matches found when setting `actual_instance` in CreateDatapointFromFilesModelMetadataInner with oneOf schemas: PrivateTextMetadataInput, PromptMetadataInput, PublicTextMetadataInput, TranscriptionMetadataInput. Details: " + ", ".join(error_messages))
+ elif match == 0:
+ # no match
+ raise ValueError("No match found when setting `actual_instance` in CreateDatapointFromFilesModelMetadataInner with oneOf schemas: PrivateTextMetadataInput, PromptMetadataInput, PublicTextMetadataInput, TranscriptionMetadataInput. Details: " + ", ".join(error_messages))
+ else:
+ return v
+
+ @classmethod
+ def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self:
+ return cls.from_json(json.dumps(obj))
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Self:
+ """Returns the object represented by the json string"""
+ instance = cls.model_construct()
+ error_messages = []
+ match = 0
+
+ # deserialize data into PrivateTextMetadataInput
+ try:
+ instance.actual_instance = PrivateTextMetadataInput.from_json(json_str)
+ match += 1
+ except (ValidationError, ValueError) as e:
+ error_messages.append(str(e))
+ # deserialize data into PromptMetadataInput
+ try:
+ instance.actual_instance = PromptMetadataInput.from_json(json_str)
+ match += 1
+ except (ValidationError, ValueError) as e:
+ error_messages.append(str(e))
+ # deserialize data into PublicTextMetadataInput
+ try:
+ instance.actual_instance = PublicTextMetadataInput.from_json(json_str)
+ match += 1
+ except (ValidationError, ValueError) as e:
+ error_messages.append(str(e))
+ # deserialize data into TranscriptionMetadataInput
+ try:
+ instance.actual_instance = TranscriptionMetadataInput.from_json(json_str)
+ match += 1
+ except (ValidationError, ValueError) as e:
+ error_messages.append(str(e))
+
+ if match > 1:
+ # more than 1 match
+ raise ValueError("Multiple matches found when deserializing the JSON string into CreateDatapointFromFilesModelMetadataInner with oneOf schemas: PrivateTextMetadataInput, PromptMetadataInput, PublicTextMetadataInput, TranscriptionMetadataInput. Details: " + ", ".join(error_messages))
+ elif match == 0:
+ # no match
+ raise ValueError("No match found when deserializing the JSON string into CreateDatapointFromFilesModelMetadataInner with oneOf schemas: PrivateTextMetadataInput, PromptMetadataInput, PublicTextMetadataInput, TranscriptionMetadataInput. Details: " + ", ".join(error_messages))
+ else:
+ return instance
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the actual instance"""
+ if self.actual_instance is None:
+ return "null"
+
+ if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json):
+ return self.actual_instance.to_json()
+ else:
+ return json.dumps(self.actual_instance)
+
+ def to_dict(self) -> Optional[Union[Dict[str, Any], PrivateTextMetadataInput, PromptMetadataInput, PublicTextMetadataInput, TranscriptionMetadataInput]]:
+ """Returns the dict representation of the actual instance"""
+ if self.actual_instance is None:
+ return None
+
+ if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict):
+ return self.actual_instance.to_dict()
+ else:
+ # primitive type
+ return self.actual_instance
+
+ def to_str(self) -> str:
+ """Returns the string representation of the actual instance"""
+ return pprint.pformat(self.model_dump())
+
+
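
This wrapper is the usual openapi-generator oneOf container: exactly one of the four metadata inputs is passed as a single positional argument and stored in actual_instance, and serialization is delegated to it. A minimal, hedged sketch using only behaviour visible above; constructing a concrete PromptMetadataInput is outside this diff, so it is only referenced in a comment.

# Hedged sketch of the oneOf wrapper behaviour shown above.
from rapidata.api_client.models.create_datapoint_from_files_model_metadata_inner import (
    CreateDatapointFromFilesModelMetadataInner,
)

empty = CreateDatapointFromFilesModelMetadataInner()    # nothing wrapped yet
print(empty.to_json())                                  # -> "null", per to_json() above

# With a real payload you would pass exactly one metadata input positionally, e.g.
# CreateDatapointFromFilesModelMetadataInner(prompt_input) where prompt_input is a
# PromptMetadataInput instance (not shown in this diff); the actual_instance validator
# rejects anything that is not one of the four oneOf schemas.
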
rapidata/api_client/models/create_datapoint_from_text_sources_model.py
@@ -0,0 +1,109 @@
+ # coding: utf-8
+
+ """
+ Rapidata.Dataset
+
+ No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+ """ # noqa: E501
+
+
+ from __future__ import annotations
+ import pprint
+ import re # noqa: F401
+ import json
+
+ from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
+ from typing import Any, ClassVar, Dict, List, Optional
+ from rapidata.api_client.models.create_datapoint_from_files_model_metadata_inner import CreateDatapointFromFilesModelMetadataInner
+ from typing import Optional, Set
+ from typing_extensions import Self
+
+ class CreateDatapointFromTextSourcesModel(BaseModel):
+ """
+ The body request for uploading text sources to a dataset.
+ """ # noqa: E501
+ text_sources: List[StrictStr] = Field(description="The text sources to upload.", alias="textSources")
+ sort_index: Optional[StrictInt] = Field(default=None, description="The index will be used to keep the datapoints in order. Useful if upload is parallelized", alias="sortIndex")
+ metadata: Optional[List[CreateDatapointFromFilesModelMetadataInner]] = Field(default=None, description="Additional metadata to attach to the datapoint. Most commonly used to add a prompt to the datapoint using the Rapidata.Shared.Assets.Abstraction.Models.Metadata.Input.PromptMetadataInput.")
+ __properties: ClassVar[List[str]] = ["textSources", "sortIndex", "metadata"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CreateDatapointFromTextSourcesModel from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([
+ ])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # override the default output from pydantic by calling `to_dict()` of each item in metadata (list)
+ _items = []
+ if self.metadata:
+ for _item_metadata in self.metadata:
+ if _item_metadata:
+ _items.append(_item_metadata.to_dict())
+ _dict['metadata'] = _items
+ # set to None if sort_index (nullable) is None
+ # and model_fields_set contains the field
+ if self.sort_index is None and "sort_index" in self.model_fields_set:
+ _dict['sortIndex'] = None
+
+ # set to None if metadata (nullable) is None
+ # and model_fields_set contains the field
+ if self.metadata is None and "metadata" in self.model_fields_set:
+ _dict['metadata'] = None
+
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CreateDatapointFromTextSourcesModel from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "textSources": obj.get("textSources"),
+ "sortIndex": obj.get("sortIndex"),
+ "metadata": [CreateDatapointFromFilesModelMetadataInner.from_dict(_item) for _item in obj["metadata"]] if obj.get("metadata") is not None else None
+ })
+ return _obj
+
+
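
A short, hedged sketch of the body this model produces, using only the fields declared above (metadata is omitted here, so it never appears in the output because it was never set):

# Hedged usage sketch for the new text-sources body model.
from rapidata.api_client.models.create_datapoint_from_text_sources_model import CreateDatapointFromTextSourcesModel

body = CreateDatapointFromTextSourcesModel(textSources=["a cat riding a bicycle"], sortIndex=0)
print(body.to_dict())   # -> {'textSources': ['a cat riding a bicycle'], 'sortIndex': 0}
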
rapidata/api_client/models/create_datapoint_from_urls_model.py
@@ -19,16 +19,16 @@ import json
 
  from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr
  from typing import Any, ClassVar, Dict, List, Optional
- from rapidata.api_client.models.create_datapoint_from_urls_model_metadata_inner import CreateDatapointFromUrlsModelMetadataInner
+ from rapidata.api_client.models.create_datapoint_from_files_model_metadata_inner import CreateDatapointFromFilesModelMetadataInner
  from typing import Optional, Set
  from typing_extensions import Self
 
  class CreateDatapointFromUrlsModel(BaseModel):
  """
- The model for creating a datapoint from urls.
+ The body request for creating a datapoint from urls.
  """ # noqa: E501
- urls: List[StrictStr] = Field(description="The urls to fetch the assets from. The urls must be publicly accessible. A HEAD request will be made to each url to check if it is accessible.")
- metadata: Optional[List[CreateDatapointFromUrlsModelMetadataInner]] = Field(default=None, description="Additional metadata to attach to the datapoint. Most commonly used to add a prompt to the datapoint using the Rapidata.Shared.Assets.Abstraction.Models.Metadata.Input.PromptMetadataInput.")
+ urls: List[StrictStr] = Field(description="The urls to fetch the assets from. The urls must be publicly accessible. A HEAD request will be made to each url to check if it is accessible.")
+ metadata: Optional[List[CreateDatapointFromFilesModelMetadataInner]] = Field(default=None, description="Additional metadata to attach to the datapoint. Most commonly used to add a prompt to the datapoint using the Rapidata.Shared.Assets.Abstraction.Models.Metadata.Input.PromptMetadataInput.")
  sort_index: Optional[StrictInt] = Field(default=None, description="The index will be used to keep the datapoints in order. Useful if upload is parallelized", alias="sortIndex")
  __properties: ClassVar[List[str]] = ["urls", "metadata", "sortIndex"]
 
@@ -101,7 +101,7 @@ class CreateDatapointFromUrlsModel(BaseModel):
 
  _obj = cls.model_validate({
  "urls": obj.get("urls"),
- "metadata": [CreateDatapointFromUrlsModelMetadataInner.from_dict(_item) for _item in obj["metadata"]] if obj.get("metadata") is not None else None,
+ "metadata": [CreateDatapointFromFilesModelMetadataInner.from_dict(_item) for _item in obj["metadata"]] if obj.get("metadata") is not None else None,
  "sortIndex": obj.get("sortIndex")
  })
  return _obj
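
The only behavioural change above is that metadata items are now the shared CreateDatapointFromFilesModelMetadataInner wrapper; the URL fields themselves are untouched. A hedged sketch with placeholder URLs:

# Hedged sketch; the URLs are placeholders and are HEAD-checked by the backend per the field description.
from rapidata.api_client.models.create_datapoint_from_urls_model import CreateDatapointFromUrlsModel

body = CreateDatapointFromUrlsModel(
    urls=["https://example.com/image-1.png", "https://example.com/image-2.png"],
    sortIndex=0,
)
# metadata, when provided, is a list of CreateDatapointFromFilesModelMetadataInner wrappers
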
rapidata/api_client/models/create_datapoints_from_s3_bucket_model.py
@@ -0,0 +1,124 @@
+ # coding: utf-8
+
+ """
+ Rapidata.Dataset
+
+ No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
+
+ The version of the OpenAPI document: v1
+ Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+ Do not edit the class manually.
+ """ # noqa: E501
+
+
+ from __future__ import annotations
+ import pprint
+ import re # noqa: F401
+ import json
+
+ from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
+ from typing import Any, ClassVar, Dict, List, Optional
+ from typing import Optional, Set
+ from typing_extensions import Self
+
+ class CreateDatapointsFromS3BucketModel(BaseModel):
+ """
+ The model for creating datapoints from files in a S3 bucket.
+ """ # noqa: E501
+ bucket_name: StrictStr = Field(description="The name of the S3 bucket to upload the files from.", alias="bucketName")
+ region: Optional[StrictStr] = Field(default=None, description="The region of the S3 bucket.")
+ source_prefix: StrictStr = Field(description="The prefix of the files to upload.", alias="sourcePrefix")
+ access_key: Optional[StrictStr] = Field(default=None, description="The access key to use for the S3 bucket.", alias="accessKey")
+ secret_key: Optional[StrictStr] = Field(default=None, description="The secret key to use for the S3 bucket.", alias="secretKey")
+ use_custom_aws_credentials: Optional[StrictBool] = Field(default=None, description="Whether to use custom AWS credentials.", alias="useCustomAwsCredentials")
+ clear_dataset: Optional[StrictBool] = Field(default=None, description="Whether to clear the dataset before uploading the files.", alias="clearDataset")
+ __properties: ClassVar[List[str]] = ["bucketName", "region", "sourcePrefix", "accessKey", "secretKey", "useCustomAwsCredentials", "clearDataset"]
+
+ model_config = ConfigDict(
+ populate_by_name=True,
+ validate_assignment=True,
+ protected_namespaces=(),
+ )
+
+
+ def to_str(self) -> str:
+ """Returns the string representation of the model using alias"""
+ return pprint.pformat(self.model_dump(by_alias=True))
+
+ def to_json(self) -> str:
+ """Returns the JSON representation of the model using alias"""
+ # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+ return json.dumps(self.to_dict())
+
+ @classmethod
+ def from_json(cls, json_str: str) -> Optional[Self]:
+ """Create an instance of CreateDatapointsFromS3BucketModel from a JSON string"""
+ return cls.from_dict(json.loads(json_str))
+
+ def to_dict(self) -> Dict[str, Any]:
+ """Return the dictionary representation of the model using alias.
+
+ This has the following differences from calling pydantic's
+ `self.model_dump(by_alias=True)`:
+
+ * `None` is only added to the output dict for nullable fields that
+ were set at model initialization. Other fields with value `None`
+ are ignored.
+ """
+ excluded_fields: Set[str] = set([
+ ])
+
+ _dict = self.model_dump(
+ by_alias=True,
+ exclude=excluded_fields,
+ exclude_none=True,
+ )
+ # set to None if region (nullable) is None
+ # and model_fields_set contains the field
+ if self.region is None and "region" in self.model_fields_set:
+ _dict['region'] = None
+
+ # set to None if access_key (nullable) is None
+ # and model_fields_set contains the field
+ if self.access_key is None and "access_key" in self.model_fields_set:
+ _dict['accessKey'] = None
+
+ # set to None if secret_key (nullable) is None
+ # and model_fields_set contains the field
+ if self.secret_key is None and "secret_key" in self.model_fields_set:
+ _dict['secretKey'] = None
+
+ # set to None if use_custom_aws_credentials (nullable) is None
+ # and model_fields_set contains the field
+ if self.use_custom_aws_credentials is None and "use_custom_aws_credentials" in self.model_fields_set:
+ _dict['useCustomAwsCredentials'] = None
+
+ # set to None if clear_dataset (nullable) is None
+ # and model_fields_set contains the field
+ if self.clear_dataset is None and "clear_dataset" in self.model_fields_set:
+ _dict['clearDataset'] = None
+
+ return _dict
+
+ @classmethod
+ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+ """Create an instance of CreateDatapointsFromS3BucketModel from a dict"""
+ if obj is None:
+ return None
+
+ if not isinstance(obj, dict):
+ return cls.model_validate(obj)
+
+ _obj = cls.model_validate({
+ "bucketName": obj.get("bucketName"),
+ "region": obj.get("region"),
+ "sourcePrefix": obj.get("sourcePrefix"),
+ "accessKey": obj.get("accessKey"),
+ "secretKey": obj.get("secretKey"),
+ "useCustomAwsCredentials": obj.get("useCustomAwsCredentials"),
+ "clearDataset": obj.get("clearDataset")
+ })
+ return _obj
+
+
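
A hedged sketch of the new S3 import body; every value below is a placeholder, and only bucketName and sourcePrefix are required by the model above:

# Hedged sketch for CreateDatapointsFromS3BucketModel; all values are placeholders.
from rapidata.api_client.models.create_datapoints_from_s3_bucket_model import CreateDatapointsFromS3BucketModel

body = CreateDatapointsFromS3BucketModel(
    bucketName="my-example-bucket",         # placeholder bucket name
    sourcePrefix="datapoints/2024/",        # only objects under this prefix are imported
    region="eu-central-1",                  # optional
    useCustomAwsCredentials=False,          # optional; accessKey/secretKey presumably only matter when this is True
    clearDataset=True,                      # optional: clear the dataset before uploading, per the field description
)
payload = body.to_json()
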