orca-sdk 0.0.95__py3-none-any.whl → 0.0.97__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (89)
  1. orca_sdk/__init__.py +1 -5
  2. orca_sdk/_generated_api_client/api/__init__.py +22 -2
  3. orca_sdk/_generated_api_client/api/{datasource/create_datasource_datasource_post.py → auth/create_org_plan_auth_org_plan_post.py} +32 -31
  4. orca_sdk/_generated_api_client/api/auth/get_org_plan_auth_org_plan_get.py +122 -0
  5. orca_sdk/_generated_api_client/api/auth/update_org_plan_auth_org_plan_put.py +168 -0
  6. orca_sdk/_generated_api_client/api/datasource/create_datasource_from_content_datasource_post.py +224 -0
  7. orca_sdk/_generated_api_client/api/datasource/create_datasource_from_files_datasource_upload_post.py +229 -0
  8. orca_sdk/_generated_api_client/api/task/list_tasks_task_get.py +21 -26
  9. orca_sdk/_generated_api_client/api/telemetry/generate_memory_suggestions_telemetry_prediction_prediction_id_memory_suggestions_post.py +239 -0
  10. orca_sdk/_generated_api_client/api/telemetry/get_action_recommendation_telemetry_prediction_prediction_id_action_get.py +192 -0
  11. orca_sdk/_generated_api_client/models/__init__.py +54 -4
  12. orca_sdk/_generated_api_client/models/action_recommendation.py +82 -0
  13. orca_sdk/_generated_api_client/models/action_recommendation_action.py +11 -0
  14. orca_sdk/_generated_api_client/models/add_memory_recommendations.py +85 -0
  15. orca_sdk/_generated_api_client/models/add_memory_suggestion.py +79 -0
  16. orca_sdk/_generated_api_client/models/body_create_datasource_from_files_datasource_upload_post.py +145 -0
  17. orca_sdk/_generated_api_client/models/class_representatives.py +92 -0
  18. orca_sdk/_generated_api_client/models/classification_model_metadata.py +14 -0
  19. orca_sdk/_generated_api_client/models/clone_memoryset_request.py +40 -0
  20. orca_sdk/_generated_api_client/models/constraint_violation_error_response.py +8 -7
  21. orca_sdk/_generated_api_client/models/constraint_violation_error_response_status_code.py +8 -0
  22. orca_sdk/_generated_api_client/models/create_classification_model_request.py +40 -0
  23. orca_sdk/_generated_api_client/models/create_datasource_from_content_request.py +101 -0
  24. orca_sdk/_generated_api_client/models/create_memoryset_request.py +40 -0
  25. orca_sdk/_generated_api_client/models/create_org_plan_request.py +73 -0
  26. orca_sdk/_generated_api_client/models/create_org_plan_request_tier.py +11 -0
  27. orca_sdk/_generated_api_client/models/create_regression_model_request.py +20 -0
  28. orca_sdk/_generated_api_client/models/embed_request.py +20 -0
  29. orca_sdk/_generated_api_client/models/embedding_evaluation_payload.py +28 -10
  30. orca_sdk/_generated_api_client/models/embedding_evaluation_request.py +28 -10
  31. orca_sdk/_generated_api_client/models/embedding_model_result.py +9 -0
  32. orca_sdk/_generated_api_client/models/filter_item.py +31 -23
  33. orca_sdk/_generated_api_client/models/filter_item_field_type_1_item_type_0.py +8 -0
  34. orca_sdk/_generated_api_client/models/filter_item_field_type_2_item_type_0.py +8 -0
  35. orca_sdk/_generated_api_client/models/filter_item_field_type_2_item_type_1.py +2 -0
  36. orca_sdk/_generated_api_client/models/internal_server_error_response.py +8 -7
  37. orca_sdk/_generated_api_client/models/internal_server_error_response_status_code.py +8 -0
  38. orca_sdk/_generated_api_client/models/labeled_memory.py +5 -5
  39. orca_sdk/_generated_api_client/models/labeled_memory_update.py +16 -16
  40. orca_sdk/_generated_api_client/models/labeled_memory_with_feedback_metrics.py +5 -5
  41. orca_sdk/_generated_api_client/models/lookup_request.py +20 -0
  42. orca_sdk/_generated_api_client/models/memory_metrics.py +98 -0
  43. orca_sdk/_generated_api_client/models/memoryset_analysis_configs.py +33 -0
  44. orca_sdk/_generated_api_client/models/memoryset_class_patterns_analysis_config.py +79 -0
  45. orca_sdk/_generated_api_client/models/memoryset_class_patterns_metrics.py +138 -0
  46. orca_sdk/_generated_api_client/models/memoryset_metadata.py +42 -0
  47. orca_sdk/_generated_api_client/models/memoryset_metrics.py +33 -0
  48. orca_sdk/_generated_api_client/models/memoryset_update.py +20 -0
  49. orca_sdk/_generated_api_client/models/not_found_error_response.py +6 -7
  50. orca_sdk/_generated_api_client/models/not_found_error_response_resource_type_0.py +1 -0
  51. orca_sdk/_generated_api_client/models/not_found_error_response_status_code.py +8 -0
  52. orca_sdk/_generated_api_client/models/org_plan.py +99 -0
  53. orca_sdk/_generated_api_client/models/org_plan_tier.py +11 -0
  54. orca_sdk/_generated_api_client/models/paginated_task.py +108 -0
  55. orca_sdk/_generated_api_client/models/predictive_model_update.py +20 -0
  56. orca_sdk/_generated_api_client/models/pretrained_embedding_model_metadata.py +8 -0
  57. orca_sdk/_generated_api_client/models/regression_model_metadata.py +14 -0
  58. orca_sdk/_generated_api_client/models/scored_memory_update.py +9 -9
  59. orca_sdk/_generated_api_client/models/service_unavailable_error_response.py +8 -7
  60. orca_sdk/_generated_api_client/models/service_unavailable_error_response_status_code.py +8 -0
  61. orca_sdk/_generated_api_client/models/telemetry_field_type_0_item_type_0.py +8 -0
  62. orca_sdk/_generated_api_client/models/telemetry_field_type_1_item_type_0.py +8 -0
  63. orca_sdk/_generated_api_client/models/telemetry_field_type_1_item_type_1.py +8 -0
  64. orca_sdk/_generated_api_client/models/telemetry_filter_item.py +42 -30
  65. orca_sdk/_generated_api_client/models/telemetry_sort_options.py +42 -30
  66. orca_sdk/_generated_api_client/models/unauthenticated_error_response.py +8 -7
  67. orca_sdk/_generated_api_client/models/unauthenticated_error_response_status_code.py +8 -0
  68. orca_sdk/_generated_api_client/models/unauthorized_error_response.py +8 -7
  69. orca_sdk/_generated_api_client/models/unauthorized_error_response_status_code.py +8 -0
  70. orca_sdk/_generated_api_client/models/update_org_plan_request.py +73 -0
  71. orca_sdk/_generated_api_client/models/update_org_plan_request_tier.py +11 -0
  72. orca_sdk/_shared/metrics.py +1 -1
  73. orca_sdk/classification_model.py +4 -1
  74. orca_sdk/classification_model_test.py +53 -0
  75. orca_sdk/credentials.py +15 -1
  76. orca_sdk/datasource.py +180 -41
  77. orca_sdk/datasource_test.py +194 -0
  78. orca_sdk/embedding_model.py +51 -13
  79. orca_sdk/embedding_model_test.py +27 -0
  80. orca_sdk/job.py +15 -14
  81. orca_sdk/job_test.py +34 -0
  82. orca_sdk/memoryset.py +47 -7
  83. orca_sdk/regression_model_test.py +0 -1
  84. orca_sdk/telemetry.py +94 -3
  85. {orca_sdk-0.0.95.dist-info → orca_sdk-0.0.97.dist-info}/METADATA +18 -1
  86. {orca_sdk-0.0.95.dist-info → orca_sdk-0.0.97.dist-info}/RECORD +87 -56
  87. orca_sdk/_generated_api_client/models/body_create_datasource_datasource_post.py +0 -207
  88. orca_sdk/_generated_api_client/models/labeled_memory_metrics.py +0 -246
  89. {orca_sdk-0.0.95.dist-info → orca_sdk-0.0.97.dist-info}/WHEEL +0 -0

orca_sdk/_generated_api_client/models/constraint_violation_error_response_status_code.py
@@ -0,0 +1,8 @@
+ from enum import IntEnum
+
+
+ class ConstraintViolationErrorResponseStatusCode(IntEnum):
+     VALUE_409 = 409
+
+     def __str__(self) -> str:
+         return str(self.value)
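
This new status-code enum subclasses IntEnum and overrides __str__, so a member compares equal to the raw HTTP status integer while printing as the bare number; the other new *_status_code modules in the file list are the same size and presumably follow the same pattern. A minimal sketch of the resulting behavior, reusing the class exactly as shown above:

# Sketch: behavior of the generated IntEnum status codes (class copied from the hunk above).
from enum import IntEnum


class ConstraintViolationErrorResponseStatusCode(IntEnum):
    VALUE_409 = 409

    def __str__(self) -> str:
        return str(self.value)


code = ConstraintViolationErrorResponseStatusCode.VALUE_409
assert code == 409           # IntEnum members compare equal to their integer value
assert str(code) == "409"    # the __str__ override prints the bare number
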
orca_sdk/_generated_api_client/models/create_classification_model_request.py
@@ -28,6 +28,8 @@ class CreateClassificationModelRequest:
          name (str):
          memoryset_id (str):
          description (Union[None, Unset, str]):
+         notes (Union[None, Unset, str]):
+         memoryset_name (Union[None, Unset, str]):
          memory_lookup_count (Union[None, Unset, int]):
          head_type (Union[Unset, RACHeadType]):
          weigh_memories (Union[None, Unset, bool]):
@@ -38,6 +40,8 @@ class CreateClassificationModelRequest:
      name: str
      memoryset_id: str
      description: Union[None, Unset, str] = UNSET
+     notes: Union[None, Unset, str] = UNSET
+     memoryset_name: Union[None, Unset, str] = UNSET
      memory_lookup_count: Union[None, Unset, int] = UNSET
      head_type: Union[Unset, RACHeadType] = UNSET
      weigh_memories: Union[None, Unset, bool] = UNSET
@@ -56,6 +60,18 @@ class CreateClassificationModelRequest:
          else:
              description = self.description

+         notes: Union[None, Unset, str]
+         if isinstance(self.notes, Unset):
+             notes = UNSET
+         else:
+             notes = self.notes
+
+         memoryset_name: Union[None, Unset, str]
+         if isinstance(self.memoryset_name, Unset):
+             memoryset_name = UNSET
+         else:
+             memoryset_name = self.memoryset_name
+
          memory_lookup_count: Union[None, Unset, int]
          if isinstance(self.memory_lookup_count, Unset):
              memory_lookup_count = UNSET
@@ -94,6 +110,10 @@ class CreateClassificationModelRequest:
          )
          if description is not UNSET:
              field_dict["description"] = description
+         if notes is not UNSET:
+             field_dict["notes"] = notes
+         if memoryset_name is not UNSET:
+             field_dict["memoryset_name"] = memoryset_name
          if memory_lookup_count is not UNSET:
              field_dict["memory_lookup_count"] = memory_lookup_count
          if head_type is not UNSET:
@@ -123,6 +143,24 @@ class CreateClassificationModelRequest:

          description = _parse_description(d.pop("description", UNSET))

+         def _parse_notes(data: object) -> Union[None, Unset, str]:
+             if data is None:
+                 return data
+             if isinstance(data, Unset):
+                 return data
+             return cast(Union[None, Unset, str], data)
+
+         notes = _parse_notes(d.pop("notes", UNSET))
+
+         def _parse_memoryset_name(data: object) -> Union[None, Unset, str]:
+             if data is None:
+                 return data
+             if isinstance(data, Unset):
+                 return data
+             return cast(Union[None, Unset, str], data)
+
+         memoryset_name = _parse_memoryset_name(d.pop("memoryset_name", UNSET))
+
          def _parse_memory_lookup_count(data: object) -> Union[None, Unset, int]:
              if data is None:
                  return data
@@ -170,6 +208,8 @@ class CreateClassificationModelRequest:
              name=name,
              memoryset_id=memoryset_id,
              description=description,
+             notes=notes,
+             memoryset_name=memoryset_name,
              memory_lookup_count=memory_lookup_count,
              head_type=head_type,
              weigh_memories=weigh_memories,
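
In 0.0.97 the classification-model create request grows two optional fields, notes and memoryset_name, both serialized only when set. A hedged round-trip sketch using just the fields visible in the hunks above; the import path is assumed from the package layout in the file list, and the id value is hypothetical:

# Sketch: round-tripping the updated request; optional fields left UNSET are omitted.
from orca_sdk._generated_api_client.models import CreateClassificationModelRequest

req = CreateClassificationModelRequest(
    name="ticket-classifier",
    memoryset_id="mem_123",          # hypothetical id
    notes="trained on Q3 tickets",   # new in 0.0.97
    memoryset_name="tickets-q3",     # new in 0.0.97
)
payload = req.to_dict()
assert payload["notes"] == "trained on Q3 tickets"
assert "description" not in payload  # UNSET fields never reach the payload

restored = CreateClassificationModelRequest.from_dict(payload)
assert restored.memoryset_name == "tickets-q3"

The CreateRegressionModelRequest hunks further down add the same notes field with identical serialization behavior.
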
orca_sdk/_generated_api_client/models/create_datasource_from_content_request.py
@@ -0,0 +1,101 @@
+ """
+ This file is generated by the openapi-python-client tool via the generate_api_client.py script
+
+ It is a customized template from the openapi-python-client tool's default template:
+ https://github.com/openapi-generators/openapi-python-client/blob/861ef5622f10fc96d240dc9becb0edf94e61446c/openapi_python_client/templates/model.py.jinja
+
+ The main change is:
+ - Fix typing issues
+ """
+
+ # flake8: noqa: C901
+
+ from typing import Any, Type, TypeVar, Union, cast
+
+ from attrs import define as _attrs_define
+ from attrs import field as _attrs_field
+
+ from ..types import UNSET, Unset
+
+ T = TypeVar("T", bound="CreateDatasourceFromContentRequest")
+
+
+ @_attrs_define
+ class CreateDatasourceFromContentRequest:
+     """Request model for creating a datasource from JSON content.
+
+     Attributes:
+         name (str):
+         content (Any):
+         description (Union[None, Unset, str]):
+     """
+
+     name: str
+     content: Any
+     description: Union[None, Unset, str] = UNSET
+     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+     def to_dict(self) -> dict[str, Any]:
+         name = self.name
+
+         content = self.content
+
+         description: Union[None, Unset, str]
+         if isinstance(self.description, Unset):
+             description = UNSET
+         else:
+             description = self.description
+
+         field_dict: dict[str, Any] = {}
+         field_dict.update(self.additional_properties)
+         field_dict.update(
+             {
+                 "name": name,
+                 "content": content,
+             }
+         )
+         if description is not UNSET:
+             field_dict["description"] = description
+
+         return field_dict
+
+     @classmethod
+     def from_dict(cls: Type[T], src_dict: dict[str, Any]) -> T:
+         d = src_dict.copy()
+         name = d.pop("name")
+
+         content = d.pop("content")
+
+         def _parse_description(data: object) -> Union[None, Unset, str]:
+             if data is None:
+                 return data
+             if isinstance(data, Unset):
+                 return data
+             return cast(Union[None, Unset, str], data)
+
+         description = _parse_description(d.pop("description", UNSET))
+
+         create_datasource_from_content_request = cls(
+             name=name,
+             content=content,
+             description=description,
+         )
+
+         create_datasource_from_content_request.additional_properties = d
+         return create_datasource_from_content_request
+
+     @property
+     def additional_keys(self) -> list[str]:
+         return list(self.additional_properties.keys())
+
+     def __getitem__(self, key: str) -> Any:
+         return self.additional_properties[key]
+
+     def __setitem__(self, key: str, value: Any) -> None:
+         self.additional_properties[key] = value
+
+     def __delitem__(self, key: str) -> None:
+         del self.additional_properties[key]
+
+     def __contains__(self, key: str) -> bool:
+         return key in self.additional_properties
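
This model backs the new create_datasource_from_content endpoint (file #6 in the list) and accepts arbitrary JSON-serializable content. A short usage sketch based only on the class above; the import path is assumed from the package layout, and the row shape is purely illustrative:

# Sketch: building a from-content datasource request and serializing it.
from orca_sdk._generated_api_client.models import CreateDatasourceFromContentRequest

rows = [
    {"text": "great product", "label": 1},
    {"text": "arrived broken", "label": 0},
]
req = CreateDatasourceFromContentRequest(name="reviews", content=rows)

body = req.to_dict()
# description was never set, so it is omitted entirely
assert body == {"name": "reviews", "content": rows}
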
orca_sdk/_generated_api_client/models/create_memoryset_request.py
@@ -35,6 +35,7 @@ class CreateMemorysetRequest:
          datasource_id (str):
          datasource_value_column (str):
          description (Union[None, Unset, str]):
+         notes (Union[None, Unset, str]):
          datasource_label_column (Union[None, Unset, str]):
          datasource_score_column (Union[None, Unset, str]):
          datasource_source_id_column (Union[None, Unset, str]):
@@ -45,12 +46,14 @@ class CreateMemorysetRequest:
          label_names (Union[List[str], None, Unset]):
          index_type (Union[Unset, CreateMemorysetRequestIndexType]): Default: CreateMemorysetRequestIndexType.FLAT.
          index_params (Union[Unset, CreateMemorysetRequestIndexParams]):
+         prompt (Union[None, Unset, str]):
      """

      name: str
      datasource_id: str
      datasource_value_column: str
      description: Union[None, Unset, str] = UNSET
+     notes: Union[None, Unset, str] = UNSET
      datasource_label_column: Union[None, Unset, str] = UNSET
      datasource_score_column: Union[None, Unset, str] = UNSET
      datasource_source_id_column: Union[None, Unset, str] = UNSET
@@ -61,6 +64,7 @@ class CreateMemorysetRequest:
      label_names: Union[List[str], None, Unset] = UNSET
      index_type: Union[Unset, CreateMemorysetRequestIndexType] = CreateMemorysetRequestIndexType.FLAT
      index_params: Union[Unset, "CreateMemorysetRequestIndexParams"] = UNSET
+     prompt: Union[None, Unset, str] = UNSET
      additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

      def to_dict(self) -> dict[str, Any]:
@@ -76,6 +80,12 @@ class CreateMemorysetRequest:
          else:
              description = self.description

+         notes: Union[None, Unset, str]
+         if isinstance(self.notes, Unset):
+             notes = UNSET
+         else:
+             notes = self.notes
+
          datasource_label_column: Union[None, Unset, str]
          if isinstance(self.datasource_label_column, Unset):
              datasource_label_column = UNSET
@@ -137,6 +147,12 @@ class CreateMemorysetRequest:
          if not isinstance(self.index_params, Unset):
              index_params = self.index_params.to_dict()

+         prompt: Union[None, Unset, str]
+         if isinstance(self.prompt, Unset):
+             prompt = UNSET
+         else:
+             prompt = self.prompt
+
          field_dict: dict[str, Any] = {}
          field_dict.update(self.additional_properties)
          field_dict.update(
@@ -148,6 +164,8 @@ class CreateMemorysetRequest:
          )
          if description is not UNSET:
              field_dict["description"] = description
+         if notes is not UNSET:
+             field_dict["notes"] = notes
          if datasource_label_column is not UNSET:
              field_dict["datasource_label_column"] = datasource_label_column
          if datasource_score_column is not UNSET:
@@ -168,6 +186,8 @@ class CreateMemorysetRequest:
              field_dict["index_type"] = index_type
          if index_params is not UNSET:
              field_dict["index_params"] = index_params
+         if prompt is not UNSET:
+             field_dict["prompt"] = prompt

          return field_dict

@@ -191,6 +211,15 @@ class CreateMemorysetRequest:

          description = _parse_description(d.pop("description", UNSET))

+         def _parse_notes(data: object) -> Union[None, Unset, str]:
+             if data is None:
+                 return data
+             if isinstance(data, Unset):
+                 return data
+             return cast(Union[None, Unset, str], data)
+
+         notes = _parse_notes(d.pop("notes", UNSET))
+
          def _parse_datasource_label_column(data: object) -> Union[None, Unset, str]:
              if data is None:
                  return data
@@ -288,11 +317,21 @@ class CreateMemorysetRequest:
          else:
              index_params = CreateMemorysetRequestIndexParams.from_dict(_index_params)

+         def _parse_prompt(data: object) -> Union[None, Unset, str]:
+             if data is None:
+                 return data
+             if isinstance(data, Unset):
+                 return data
+             return cast(Union[None, Unset, str], data)
+
+         prompt = _parse_prompt(d.pop("prompt", UNSET))
+
          create_memoryset_request = cls(
              name=name,
              datasource_id=datasource_id,
              datasource_value_column=datasource_value_column,
              description=description,
+             notes=notes,
              datasource_label_column=datasource_label_column,
              datasource_score_column=datasource_score_column,
              datasource_source_id_column=datasource_source_id_column,
@@ -303,6 +342,7 @@ class CreateMemorysetRequest:
              label_names=label_names,
              index_type=index_type,
              index_params=index_params,
+             prompt=prompt,
          )

          create_memoryset_request.additional_properties = d
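
Memoryset creation gains two optional fields in 0.0.97, notes and prompt, both omitted from the payload while UNSET. A hedged sketch using only attributes shown in the hunks above; ids and column names are hypothetical and the import path is assumed from the package layout:

# Sketch: the new optional notes and prompt fields on CreateMemorysetRequest.
from orca_sdk._generated_api_client.models import CreateMemorysetRequest

req = CreateMemorysetRequest(
    name="support-tickets",
    datasource_id="ds_123",                                 # hypothetical id
    datasource_value_column="text",
    notes="imported from the Q3 export",                    # new in 0.0.97
    prompt="Represent the support ticket for retrieval:",   # new in 0.0.97
)
body = req.to_dict()
assert body["notes"] == "imported from the Q3 export"
assert body["prompt"].startswith("Represent")
assert "datasource_label_column" not in body  # unset optionals are omitted
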
orca_sdk/_generated_api_client/models/create_org_plan_request.py
@@ -0,0 +1,73 @@
+ """
+ This file is generated by the openapi-python-client tool via the generate_api_client.py script
+
+ It is a customized template from the openapi-python-client tool's default template:
+ https://github.com/openapi-generators/openapi-python-client/blob/861ef5622f10fc96d240dc9becb0edf94e61446c/openapi_python_client/templates/model.py.jinja
+
+ The main change is:
+ - Fix typing issues
+ """
+
+ # flake8: noqa: C901
+
+ from enum import Enum
+ from typing import Any, Type, TypeVar
+
+ from attrs import define as _attrs_define
+ from attrs import field as _attrs_field
+
+ from ..models.create_org_plan_request_tier import CreateOrgPlanRequestTier
+
+ T = TypeVar("T", bound="CreateOrgPlanRequest")
+
+
+ @_attrs_define
+ class CreateOrgPlanRequest:
+     """
+     Attributes:
+         tier (CreateOrgPlanRequestTier):
+     """
+
+     tier: CreateOrgPlanRequestTier
+     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+     def to_dict(self) -> dict[str, Any]:
+         tier = self.tier.value if isinstance(self.tier, Enum) else self.tier
+
+         field_dict: dict[str, Any] = {}
+         field_dict.update(self.additional_properties)
+         field_dict.update(
+             {
+                 "tier": tier,
+             }
+         )
+
+         return field_dict
+
+     @classmethod
+     def from_dict(cls: Type[T], src_dict: dict[str, Any]) -> T:
+         d = src_dict.copy()
+         tier = CreateOrgPlanRequestTier(d.pop("tier"))
+
+         create_org_plan_request = cls(
+             tier=tier,
+         )
+
+         create_org_plan_request.additional_properties = d
+         return create_org_plan_request
+
+     @property
+     def additional_keys(self) -> list[str]:
+         return list(self.additional_properties.keys())
+
+     def __getitem__(self, key: str) -> Any:
+         return self.additional_properties[key]
+
+     def __setitem__(self, key: str, value: Any) -> None:
+         self.additional_properties[key] = value
+
+     def __delitem__(self, key: str) -> None:
+         del self.additional_properties[key]
+
+     def __contains__(self, key: str) -> bool:
+         return key in self.additional_properties

orca_sdk/_generated_api_client/models/create_org_plan_request_tier.py
@@ -0,0 +1,11 @@
+ from enum import Enum
+
+
+ class CreateOrgPlanRequestTier(str, Enum):
+     CANCELLED = "CANCELLED"
+     ENTERPRISE = "ENTERPRISE"
+     FREE = "FREE"
+     PRO = "PRO"
+
+     def __str__(self) -> str:
+         return str(self.value)
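
Together with the new auth/org_plan endpoints (files #3 through #5 in the list), these two models describe the request body: a single tier drawn from the enum above. A small sketch, with the import path assumed from the package layout:

# Sketch: serializing and deserializing an org-plan request.
from orca_sdk._generated_api_client.models import (
    CreateOrgPlanRequest,
    CreateOrgPlanRequestTier,
)

req = CreateOrgPlanRequest(tier=CreateOrgPlanRequestTier.PRO)
assert req.to_dict() == {"tier": "PRO"}

# from_dict coerces the raw string back into the enum
restored = CreateOrgPlanRequest.from_dict({"tier": "FREE"})
assert restored.tier is CreateOrgPlanRequestTier.FREE

UpdateOrgPlanRequest and its tier enum (files #70 and #71) are the same size and appear to mirror this shape.
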
orca_sdk/_generated_api_client/models/create_regression_model_request.py
@@ -28,6 +28,7 @@ class CreateRegressionModelRequest:
          name (str):
          memoryset_id (str):
          description (Union[None, Unset, str]):
+         notes (Union[None, Unset, str]):
          memory_lookup_count (Union[None, Unset, int]):
          head_type (Union[Unset, RARHeadType]):
      """
@@ -35,6 +36,7 @@ class CreateRegressionModelRequest:
      name: str
      memoryset_id: str
      description: Union[None, Unset, str] = UNSET
+     notes: Union[None, Unset, str] = UNSET
      memory_lookup_count: Union[None, Unset, int] = UNSET
      head_type: Union[Unset, RARHeadType] = UNSET
      additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
@@ -50,6 +52,12 @@ class CreateRegressionModelRequest:
          else:
              description = self.description

+         notes: Union[None, Unset, str]
+         if isinstance(self.notes, Unset):
+             notes = UNSET
+         else:
+             notes = self.notes
+
          memory_lookup_count: Union[None, Unset, int]
          if isinstance(self.memory_lookup_count, Unset):
              memory_lookup_count = UNSET
@@ -70,6 +78,8 @@ class CreateRegressionModelRequest:
          )
          if description is not UNSET:
              field_dict["description"] = description
+         if notes is not UNSET:
+             field_dict["notes"] = notes
          if memory_lookup_count is not UNSET:
              field_dict["memory_lookup_count"] = memory_lookup_count
          if head_type is not UNSET:
@@ -93,6 +103,15 @@ class CreateRegressionModelRequest:

          description = _parse_description(d.pop("description", UNSET))

+         def _parse_notes(data: object) -> Union[None, Unset, str]:
+             if data is None:
+                 return data
+             if isinstance(data, Unset):
+                 return data
+             return cast(Union[None, Unset, str], data)
+
+         notes = _parse_notes(d.pop("notes", UNSET))
+
          def _parse_memory_lookup_count(data: object) -> Union[None, Unset, int]:
              if data is None:
                  return data
@@ -113,6 +132,7 @@ class CreateRegressionModelRequest:
              name=name,
              memoryset_id=memoryset_id,
              description=description,
+             notes=notes,
              memory_lookup_count=memory_lookup_count,
              head_type=head_type,
          )

orca_sdk/_generated_api_client/models/embed_request.py
@@ -26,10 +26,12 @@ class EmbedRequest:
      Attributes:
          values (List[str]):
          max_seq_length (Union[None, Unset, int]):
+         prompt (Union[None, Unset, str]):
      """

      values: List[str]
      max_seq_length: Union[None, Unset, int] = UNSET
+     prompt: Union[None, Unset, str] = UNSET
      additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

      def to_dict(self) -> dict[str, Any]:
@@ -43,6 +45,12 @@ class EmbedRequest:
          else:
              max_seq_length = self.max_seq_length

+         prompt: Union[None, Unset, str]
+         if isinstance(self.prompt, Unset):
+             prompt = UNSET
+         else:
+             prompt = self.prompt
+
          field_dict: dict[str, Any] = {}
          field_dict.update(self.additional_properties)
          field_dict.update(
@@ -52,6 +60,8 @@ class EmbedRequest:
          )
          if max_seq_length is not UNSET:
              field_dict["max_seq_length"] = max_seq_length
+         if prompt is not UNSET:
+             field_dict["prompt"] = prompt

          return field_dict

@@ -110,9 +120,19 @@ class EmbedRequest:

          max_seq_length = _parse_max_seq_length(d.pop("max_seq_length", UNSET))

+         def _parse_prompt(data: object) -> Union[None, Unset, str]:
+             if data is None:
+                 return data
+             if isinstance(data, Unset):
+                 return data
+             return cast(Union[None, Unset, str], data)
+
+         prompt = _parse_prompt(d.pop("prompt", UNSET))
+
          embed_request = cls(
              values=values,
              max_seq_length=max_seq_length,
+             prompt=prompt,
          )

          embed_request.additional_properties = d
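
EmbedRequest now carries an optional prompt that is only serialized when provided. A quick sketch based on the hunks above, with the import path assumed from the package layout:

# Sketch: prompt is optional and omitted from the payload while UNSET.
from orca_sdk._generated_api_client.models import EmbedRequest

plain = EmbedRequest(values=["refund request", "shipping delay"])
assert "prompt" not in plain.to_dict()

prompted = EmbedRequest(
    values=["refund request"],
    prompt="Classify the support ticket:",  # new in 0.0.97
)
assert prompted.to_dict()["prompt"] == "Classify the support ticket:"
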
orca_sdk/_generated_api_client/models/embedding_evaluation_payload.py
@@ -32,7 +32,7 @@ class EmbeddingEvaluationPayload:
          datasource_id (str):
          neighbor_count (Union[Unset, int]): Default: 5.
          label_names (Union[List[str], None, Unset]):
-         embedding_models (Union[List[PretrainedEmbeddingModelName], None, Unset]):
+         embedding_models (Union[List[Union[PretrainedEmbeddingModelName, str]], None, Unset]):
      """

      value_column: str
@@ -41,7 +41,7 @@ class EmbeddingEvaluationPayload:
      datasource_id: str
      neighbor_count: Union[Unset, int] = 5
      label_names: Union[List[str], None, Unset] = UNSET
-     embedding_models: Union[List[PretrainedEmbeddingModelName], None, Unset] = UNSET
+     embedding_models: Union[List[Union[PretrainedEmbeddingModelName, str]], None, Unset] = UNSET
      additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

      def to_dict(self) -> dict[str, Any]:
@@ -71,11 +71,15 @@ class EmbeddingEvaluationPayload:
          elif isinstance(self.embedding_models, list):
              embedding_models = []
              for embedding_models_type_0_item_data in self.embedding_models:
-                 embedding_models_type_0_item = (
-                     embedding_models_type_0_item_data.value
-                     if isinstance(embedding_models_type_0_item_data, Enum)
-                     else embedding_models_type_0_item_data
-                 )
+                 embedding_models_type_0_item: str
+                 if isinstance(embedding_models_type_0_item_data, PretrainedEmbeddingModelName):
+                     embedding_models_type_0_item = (
+                         embedding_models_type_0_item_data.value
+                         if isinstance(embedding_models_type_0_item_data, Enum)
+                         else embedding_models_type_0_item_data
+                     )
+                 else:
+                     embedding_models_type_0_item = embedding_models_type_0_item_data
                  embedding_models.append(embedding_models_type_0_item)

          else:
@@ -135,7 +139,7 @@ class EmbeddingEvaluationPayload:

          label_names = _parse_label_names(d.pop("label_names", UNSET))

-         def _parse_embedding_models(data: object) -> Union[List[PretrainedEmbeddingModelName], None, Unset]:
+         def _parse_embedding_models(data: object) -> Union[List[Union[PretrainedEmbeddingModelName, str]], None, Unset]:
              if data is None:
                  return data
              if isinstance(data, Unset):
@@ -146,14 +150,28 @@ class EmbeddingEvaluationPayload:
                  embedding_models_type_0 = []
                  _embedding_models_type_0 = data
                  for embedding_models_type_0_item_data in _embedding_models_type_0:
-                     embedding_models_type_0_item = PretrainedEmbeddingModelName(embedding_models_type_0_item_data)
+
+                     def _parse_embedding_models_type_0_item(data: object) -> Union[PretrainedEmbeddingModelName, str]:
+                         try:
+                             if not isinstance(data, str):
+                                 raise TypeError()
+                             embedding_models_type_0_item_type_0 = PretrainedEmbeddingModelName(data)
+
+                             return embedding_models_type_0_item_type_0
+                         except:  # noqa: E722
+                             pass
+                         return cast(Union[PretrainedEmbeddingModelName, str], data)
+
+                     embedding_models_type_0_item = _parse_embedding_models_type_0_item(
+                         embedding_models_type_0_item_data
+                     )

                      embedding_models_type_0.append(embedding_models_type_0_item)

                  return embedding_models_type_0
              except:  # noqa: E722
                  pass
-             return cast(Union[List[PretrainedEmbeddingModelName], None, Unset], data)
+             return cast(Union[List[Union[PretrainedEmbeddingModelName, str]], None, Unset], data)

          embedding_models = _parse_embedding_models(d.pop("embedding_models", UNSET))
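
The substantive change in the embedding-evaluation models is that embedding_models widens from List[PretrainedEmbeddingModelName] to List[Union[PretrainedEmbeddingModelName, str]]: strings matching the pretrained enum are still coerced to enum members on parse, and any other string is now an accepted value in its own right. A self-contained sketch of that parse rule; the enum below is a local stand-in with a made-up member, not the SDK's PretrainedEmbeddingModelName:

# Sketch: the per-item parse rule shown above, mirrored with a stand-in enum.
from enum import Enum
from typing import Union, cast


class PretrainedEmbeddingModelName(str, Enum):  # stand-in; member name is invented
    GTE_BASE = "GTE_BASE"


def parse_item(data: object) -> Union[PretrainedEmbeddingModelName, str]:
    try:
        if not isinstance(data, str):
            raise TypeError()
        return PretrainedEmbeddingModelName(data)  # recognized pretrained name -> enum member
    except (TypeError, ValueError):
        pass
    return cast(Union[PretrainedEmbeddingModelName, str], data)  # anything else passes through


assert parse_item("GTE_BASE") is PretrainedEmbeddingModelName.GTE_BASE
assert parse_item("my-org/finetuned-embedder") == "my-org/finetuned-embedder"

The EmbeddingEvaluationRequest hunks that follow make the identical change.
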
orca_sdk/_generated_api_client/models/embedding_evaluation_request.py
@@ -31,7 +31,7 @@ class EmbeddingEvaluationRequest:
          source_id_column (Union[None, str]):
          neighbor_count (Union[Unset, int]): Default: 5.
          label_names (Union[List[str], None, Unset]):
-         embedding_models (Union[List[PretrainedEmbeddingModelName], None, Unset]):
+         embedding_models (Union[List[Union[PretrainedEmbeddingModelName, str]], None, Unset]):
      """

      value_column: str
@@ -39,7 +39,7 @@ class EmbeddingEvaluationRequest:
      source_id_column: Union[None, str]
      neighbor_count: Union[Unset, int] = 5
      label_names: Union[List[str], None, Unset] = UNSET
-     embedding_models: Union[List[PretrainedEmbeddingModelName], None, Unset] = UNSET
+     embedding_models: Union[List[Union[PretrainedEmbeddingModelName, str]], None, Unset] = UNSET
      additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)

      def to_dict(self) -> dict[str, Any]:
@@ -67,11 +67,15 @@ class EmbeddingEvaluationRequest:
          elif isinstance(self.embedding_models, list):
              embedding_models = []
              for embedding_models_type_0_item_data in self.embedding_models:
-                 embedding_models_type_0_item = (
-                     embedding_models_type_0_item_data.value
-                     if isinstance(embedding_models_type_0_item_data, Enum)
-                     else embedding_models_type_0_item_data
-                 )
+                 embedding_models_type_0_item: str
+                 if isinstance(embedding_models_type_0_item_data, PretrainedEmbeddingModelName):
+                     embedding_models_type_0_item = (
+                         embedding_models_type_0_item_data.value
+                         if isinstance(embedding_models_type_0_item_data, Enum)
+                         else embedding_models_type_0_item_data
+                     )
+                 else:
+                     embedding_models_type_0_item = embedding_models_type_0_item_data
                  embedding_models.append(embedding_models_type_0_item)

          else:
@@ -128,7 +132,7 @@ class EmbeddingEvaluationRequest:

          label_names = _parse_label_names(d.pop("label_names", UNSET))

-         def _parse_embedding_models(data: object) -> Union[List[PretrainedEmbeddingModelName], None, Unset]:
+         def _parse_embedding_models(data: object) -> Union[List[Union[PretrainedEmbeddingModelName, str]], None, Unset]:
              if data is None:
                  return data
              if isinstance(data, Unset):
@@ -139,14 +143,28 @@ class EmbeddingEvaluationRequest:
                  embedding_models_type_0 = []
                  _embedding_models_type_0 = data
                  for embedding_models_type_0_item_data in _embedding_models_type_0:
-                     embedding_models_type_0_item = PretrainedEmbeddingModelName(embedding_models_type_0_item_data)
+
+                     def _parse_embedding_models_type_0_item(data: object) -> Union[PretrainedEmbeddingModelName, str]:
+                         try:
+                             if not isinstance(data, str):
+                                 raise TypeError()
+                             embedding_models_type_0_item_type_0 = PretrainedEmbeddingModelName(data)
+
+                             return embedding_models_type_0_item_type_0
+                         except:  # noqa: E722
+                             pass
+                         return cast(Union[PretrainedEmbeddingModelName, str], data)
+
+                     embedding_models_type_0_item = _parse_embedding_models_type_0_item(
+                         embedding_models_type_0_item_data
+                     )

                      embedding_models_type_0.append(embedding_models_type_0_item)

                  return embedding_models_type_0
              except:  # noqa: E722
                  pass
-             return cast(Union[List[PretrainedEmbeddingModelName], None, Unset], data)
+             return cast(Union[List[Union[PretrainedEmbeddingModelName, str]], None, Unset], data)

          embedding_models = _parse_embedding_models(d.pop("embedding_models", UNSET))